code | repo_name | path | language | license | size
---|---|---|---|---|---|
package edu.depauw.escalator
import swing._
import Dialog._
import java.io._
import javax.swing._
import javax.swing.text._
import javax.swing.text.html.HTMLEditorKit
import javax.swing.undo._
import javax.swing.event.{DocumentListener, DocumentEvent, UndoableEditListener, UndoableEditEvent, HyperlinkListener, HyperlinkEvent}
import java.awt.{Color, Dimension}
import java.awt.event._
sealed trait ParseResult
case class RegularLine(s: String) extends ParseResult // hello this is regular text
case class EvalExpr(expr: String) extends ParseResult // = f
case class SavedEvalExpr(expr: String) extends ParseResult // == f
case class Decl(decl: String) extends ParseResult // > val x = f
case class Expr(expr: String) extends ParseResult // > f
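// Hypothetical sketch (not part of the original Escalator sources): one
// plausible way a raw editor line could be classified into the ParseResult
// cases above, inferred purely from the marker comments ("= f", "== f",
// "> val x = f", "> f"). The real parsing lives elsewhere (EscalatorDocument).
object ParseResultSketch {
  def classify(line: String): ParseResult = line match {
    case s if s.startsWith("==") => SavedEvalExpr(s.drop(2).trim)
    case s if s.startsWith("=")  => EvalExpr(s.drop(1).trim)
    case s if s.startsWith(">")  =>
      val body = s.drop(1).trim
      // treat definitions as declarations, everything else as an expression
      if (body.startsWith("val ") || body.startsWith("def ")) Decl(body)
      else Expr(body)
    case s => RegularLine(s)
  }
}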
class Editor(frame: EscalatorFrame) extends scala.swing.Component {
/* undo helpers */
val undoAction = new UndoAction()
val redoAction = new RedoAction()
val undo = new UndoManager()
/* components */
override lazy val peer = new JTextPane {
override def paint(g: java.awt.Graphics) = {
super.paint(g)
frame.numPanel.repaint()
}
override def getPreferredSize(): Dimension = {
new Dimension(frame.editorWidth, super.getPreferredSize().getHeight().asInstanceOf[Int])
}
}
val textPane = peer
val chooser = frame.chooser
/* styleDoc and styles */
private val styleDoc = new EscalatorDocument(this)
textPane.setStyledDocument(styleDoc)
val default = StyleContext.getDefaultStyleContext.getStyle(StyleContext.DEFAULT_STYLE)
StyleConstants.setFontFamily(default, Escalator.font.getFamily)
StyleConstants.setFontSize(default, Escalator.font.getSize)
textPane.setLogicalStyle(default)
styleDoc.addUndoableEditListener(new MyUndoableEditListener)
def closeDocument(exit : Boolean) = styleDoc.closeDocument(exit)
def openFile(file : java.io.File) = styleDoc.openFile(file)
def newDocument() = styleDoc.newDocument()
def openDocument() = styleDoc.openDocument()
def saveDocument() = styleDoc.saveDocument()
def saveDocumentAs() = styleDoc.saveDocumentAs()
/*
* Actions
*/
private def setInput(keystroke: KeyStroke, name: String) {
textPane.getInputMap().put(keystroke, name)
}
private def setAction(name:String, f: ActionEvent => Unit) {
textPane.getActionMap().put(name, new AbstractAction(name) {
def actionPerformed(evt: ActionEvent){
f(evt)
}
})
}
private var inputCounter = 0
def mapAction(keystroke: KeyStroke, f: ActionEvent => Unit) {
val n = "action" + inputCounter
setAction(n, f)
setInput(keystroke, n)
inputCounter += 1
}
mapAction(KeyStroke.getKeyStroke("ENTER"), x => styleDoc.newline())
mapAction(KeyStroke.getKeyStroke("shift ENTER"), x => styleDoc.continue())
// remap delete
//setAction(DefaultEditorKit.deletePrevCharAction, delete)
def exit():Boolean = {
if (styleDoc.closeDocument( true )) {
System.exit(0)
true
} else false
}
def changeFontSize(newSize : Int) {
StyleConstants.setFontSize(default, newSize)
textPane.setLogicalStyle(default)
styleDoc.setCharacterAttributes(0, styleDoc.getLength + 1, default, false)
Escalator.font = new java.awt.Font(Escalator.font.getFamily, Escalator.font.getStyle, newSize)
styleDoc.insertString(textPane.getCaretPosition, " ", default)
styleDoc.remove(textPane.getCaretPosition - 1, 1) // HACK!!!
styleDoc.reevaluate()
styleDoc.updateComponentFonts()
}
def showStatus(msg : String) {
frame.log.text = msg
}
def markSaved(flag : Boolean) {
if (flag) {
frame.title = "Escalator - " + styleDoc.fileName
} else {
frame.title = "Escalator - *" + styleDoc.fileName
}
if (Escalator.onOSX) {
val root = frame.peer.getRootPane
root.putClientProperty("Window.documentModified", !flag)
root.putClientProperty("Window.documentFile", styleDoc.saveFile getOrElse null)
}
}
/*
* About frame
*/
def createAbout() = {
val about = new Frame() {
preferredSize = new Dimension(360, 180)
// peer.setResizable( false )
title = "About Escalator"
val mainPanel = new BorderPanel
val flowPanel = new FlowPanel {
contents += new Button("OK") {
reactions += {
case swing.event.ButtonClicked(_) => dispose()
}
}
}
object aboutPane extends scala.swing.Component with HyperlinkListener {
override lazy val peer = new JEditorPane() {
setContentType("text/html")
setEditorKit( new HTMLEditorKit())
val tempField = new JTextField()
setBorder(tempField.getBorder())
setText("<html>" + "<body>" + "Escalator" + "<br>" +
"version Alpha 1.0" + "<br>" + "<br>" +
"Developed at DePauw University by" + "<br>" +
"Chris Monsanto, Jeremy Kings and Dr. Brian Howard")
setEditable( false )
}
def hyperlinkUpdate(e: HyperlinkEvent) {
if (e.getEventType() == HyperlinkEvent.EventType.ACTIVATED) {
BareBonesBrowserLaunch.openURL(e.getURL().toString())
}
}
}
mainPanel.layout(aboutPane) = BorderPanel.Position.Center
mainPanel.layout(flowPanel) = BorderPanel.Position.South
contents = mainPanel
}
about.pack()
about.visible = true
}
/*
* Undo Classes & Methods
*/
class UndoAction extends AbstractAction("Undo") {
setEnabled(false)
def actionPerformed(e: ActionEvent) {
try {
undo.undo()
} catch {
case ex:CannotUndoException =>
//println("Unable to undo: " + ex)
//ex.printStackTrace()
}
updateUndoState()
redoAction.updateRedoState()
}
def updateUndoState() {
if (undo.canUndo()) {
setEnabled(true)
putValue(javax.swing.Action.NAME, undo.getUndoPresentationName())
} else {
setEnabled(false)
putValue(javax.swing.Action.NAME, "Undo")
}
}
}
class RedoAction extends AbstractAction("Redo") {
setEnabled(false)
def actionPerformed(e: ActionEvent) {
try {
undo.redo()
} catch {
case ex:CannotRedoException =>
//println("Unable to redo: " + ex)
//ex.printStackTrace()
}
updateRedoState()
undoAction.updateUndoState()
}
def updateRedoState() {
if (undo.canRedo()) {
setEnabled(true)
putValue(javax.swing.Action.NAME, undo.getRedoPresentationName())
} else {
setEnabled(false)
putValue(javax.swing.Action.NAME, "Redo")
}
}
}
// Listens for edits that can be undone.
protected class MyUndoableEditListener extends UndoableEditListener {
def undoableEditHappened(e: UndoableEditEvent) {
//Remember the edit and update the menus.
undo.addEdit(e.getEdit())
undoAction.updateUndoState()
redoAction.updateRedoState()
}
}
}
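// Hypothetical sketch (not part of the original source): one way to surface
// the undo/redo actions defined above in a Swing menu. JMenuItem picks up an
// Action's name and enabled state automatically, so the menu items track the
// UndoManager via updateUndoState()/updateRedoState().
object EditorMenuSketch {
  import javax.swing.{JMenu, JMenuItem}
  def editMenu(editor: Editor): JMenu = {
    val menu = new JMenu("Edit")
    menu.add(new JMenuItem(editor.undoAction))
    menu.add(new JMenuItem(editor.redoAction))
    menu
  }
}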
| bhoward/EscalatorOld | Escalator/src/edu/depauw/escalator/Editor.scala | Scala | apache-2.0 | 7,020 |
package org.jetbrains.plugins.scala
package lang
package psi
package stubs
package elements
import com.intellij.psi.PsiElement
import com.intellij.psi.stubs.{IndexSink, StubElement, StubInputStream, StubOutputStream}
import com.intellij.util.io.StringRef
import org.jetbrains.plugins.scala.lang.psi.api.statements.{ScTypeAlias, ScTypeAliasDeclaration, ScTypeAliasDefinition}
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef.ScObject
import org.jetbrains.plugins.scala.lang.psi.stubs.impl.ScTypeAliasStubImpl
import org.jetbrains.plugins.scala.lang.psi.stubs.index.ScalaIndexKeys
/**
* User: Alexander Podkhalyuzin
* Date: 18.10.2008
*/
abstract class ScTypeAliasElementType[Func <: ScTypeAlias](debugName: String)
extends ScStubElementType[ScTypeAliasStub, ScTypeAlias](debugName) {
def createStubImpl[ParentPsi <: PsiElement](psi: ScTypeAlias, parentStub: StubElement[ParentPsi]): ScTypeAliasStub = {
val isDeclaration = psi.isInstanceOf[ScTypeAliasDeclaration]
val typeElementText = {
if (isDeclaration) ""
else {
psi.asInstanceOf[ScTypeAliasDefinition].aliasedTypeElement.getText
}
}
val lower = {
if (!isDeclaration) ""
else psi.asInstanceOf[ScTypeAliasDeclaration].lowerTypeElement.map(_.getText).getOrElse("")
}
val upper = {
if (!isDeclaration) ""
else psi.asInstanceOf[ScTypeAliasDeclaration].upperTypeElement.map(_.getText).getOrElse("")
}
val containingClass = psi.containingClass
val isStableQualifier = ScalaPsiUtil.hasStablePath(psi) && containingClass.isInstanceOf[ScObject]
new ScTypeAliasStubImpl[ParentPsi](parentStub, this, psi.name, isDeclaration, typeElementText, lower, upper,
containingClass == null, isStableQualifier)
}
def serialize(stub: ScTypeAliasStub, dataStream: StubOutputStream) {
dataStream.writeName(stub.getName)
dataStream.writeBoolean(stub.isDeclaration)
dataStream.writeName(stub.getTypeElementText)
dataStream.writeName(stub.getLowerBoundElementText)
dataStream.writeName(stub.getUpperBoundElementText)
dataStream.writeBoolean(stub.isLocal)
dataStream.writeBoolean(stub.isStableQualifier)
}
def deserializeImpl(dataStream: StubInputStream, parentStub: Any): ScTypeAliasStub = {
val name = StringRef.toString(dataStream.readName)
val isDecl = dataStream.readBoolean
val parent = parentStub.asInstanceOf[StubElement[PsiElement]]
val typeElementText = dataStream.readName.toString
val lower = dataStream.readName.toString
val upper = dataStream.readName.toString
val isLocal = dataStream.readBoolean()
val isStable = dataStream.readBoolean()
new ScTypeAliasStubImpl(parent, this, name, isDecl, typeElementText, lower, upper, isLocal, isStable)
}
def indexStub(stub: ScTypeAliasStub, sink: IndexSink) {
val name = stub.getName
if (name != null) {
sink.occurrence(ScalaIndexKeys.TYPE_ALIAS_NAME_KEY, name)
if (stub.isStableQualifier) {
sink.occurrence(ScalaIndexKeys.STABLE_ALIAS_NAME_KEY, name)
}
}
}
} | triggerNZ/intellij-scala | src/org/jetbrains/plugins/scala/lang/psi/stubs/elements/ScTypeAliasElementType.scala | Scala | apache-2.0 | 3,082 |
package com.arcusys.valamis.updaters.version320
import com.arcusys.valamis.persistence.common.DatabaseLayer
import com.arcusys.valamis.updaters.common.BaseDBUpdater
import com.arcusys.valamis.updaters.schema.v3201.LessonTableComponent
class DBUpdater3201 extends BaseDBUpdater
with LessonTableComponent
with DatabaseLayer {
override def getThreshold = 3201
import driver.api._
override def doUpgrade(): Unit = {
execSync(invisibleLessonViewers.schema.create)
}
}
| arcusys/Valamis | valamis-updaters/src/main/scala/com/arcusys/valamis/updaters/version320/DBUpdater3201.scala | Scala | gpl-3.0 | 486 |
package org.singlespaced.d3js
import org.scalajs.dom
import org.singlespaced.d3js.Ops._
import utest._
import scala.scalajs.js
object SvgAreaTest extends TestSuite {
val elem=dom.document.createElement("svg")
dom.document.body.appendChild(elem)
val tests = TestSuite {
'd3_area_xy {
//arrange
case class TestDatum(v:Int)
val data=js.Array[TestDatum](TestDatum(1),TestDatum(4))
val testee = d3.svg.area[TestDatum]()
//act
val result=testee
.x((d:TestDatum,i:Int) => d.v.toDouble )
.y((d:TestDatum,i:Int) => d.v.toDouble )
//assert
d3.select(elem).datum(data)
.append("path")
.attr("d", result)
val actual=dom.document.getElementsByTagName("path")
assert( actual.length == 1)
assert( actual.item(0).asInstanceOf[dom.Element].getAttribute("d") =="M1,1L4,4L4,4L1,1Z")
}
'd3_area_radial {
val area = d3.svg.area.radial().interpolate("basis").tension(0).radius(100)
}
}
}
| spaced/scala-js-d3 | src/test/scala/org/singlespaced/d3js/SvgAreaTest.scala | Scala | bsd-3-clause | 1,008 |
/*
* This file is part of Sloth, an SMT solver for strings.
* Copyright (C) 2017 Philipp Ruemmer, Petr Janku
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
package strsolver
import ap._
import ap.parser._
import ap.terfor.conjunctions.Conjunction
import ap.terfor.preds.Predicate
import ap.theories._
object SMTLIBStringTheory extends Theory {
override def toString = "SMTLIBStringTheory"
// TODO: use proper sorts for the operations
// Sequences
val seq_unit = new IFunction("seq_unit", 1, true, false)
val seq_empty = new IFunction("seq_empty", 0, true, false)
val seq_concat = new IFunction("seq_concat", 2, true, false)
val seq_cons = new IFunction("seq_cons", 2, true, false)
val seq_rev_cons = new IFunction("seq_rev_cons", 2, true, false)
val seq_head = new IFunction("seq_head", 1, true, false)
val seq_tail = new IFunction("seq_tail", 1, true, false)
val seq_last = new IFunction("seq_last", 1, true, false)
val seq_first = new IFunction("seq_first", 1, true, false)
val seq_prefix_of = new Predicate("seq_prefix_of", 2)
val seq_suffix_of = new Predicate("seq_suffix_of", 2)
val seq_subseq_of = new Predicate("seq_subseq_of", 2)
val seq_extract = new IFunction("seq_extract", 3, true, false)
val seq_nth = new IFunction("seq_nth", 2, true, false)
val seq_length = new IFunction("seq_length", 1, true, false)
val seq_replace = new IFunction("seq_replace", 3, true, false)
val seq_replace_all = new IFunction("seq_replace_all", 3, true, false)
// Regexes
val re_empty_set = new IFunction("re_empty_set", 0, true, false)
val re_full_set = new IFunction("re_full_set", 0, true, false)
val re_allchar = new IFunction("re.allchar", 0, true, false)
val re_concat = new IFunction("re_concat", 2, true, false)
val re_of_seq = new IFunction("re_of_seq", 1, true, false)
val re_empty_seq = new IFunction("re_empty_seq", 0, true, false)
val re_star = new IFunction("re_star", 1, true, false)
val re_loop = new IFunction("re_loop", 3, true, false)
val re_plus = new IFunction("re_plus", 1, true, false)
val re_option = new IFunction("re_option", 1, true, false)
val re_range = new IFunction("re_range", 2, true, false)
val re_union = new IFunction("re_union", 2, true, false)
val re_difference = new IFunction("re_difference", 2, true, false)
val re_intersect = new IFunction("re_intersect", 2, true, false)
val re_complement = new IFunction("re_complement", 1, true, false)
val re_of_pred = new IFunction("re_of_pred", 1, true, false)
val re_member = new Predicate("re_member", 2)
//////////////////////////////////////////////////////////////////////////////
val functions = List(seq_unit, seq_empty, seq_concat,
seq_cons, seq_rev_cons, seq_head, seq_tail, seq_last,
seq_first, seq_extract, seq_nth, seq_length,
re_empty_set, re_full_set, re_allchar, re_concat,
re_of_seq, re_empty_seq,
re_star, re_loop, re_plus, re_option, re_range,
re_union, re_difference, re_intersect, re_complement,
re_of_pred, seq_replace, seq_replace_all)
val (predicates, functionPredicateMapping, functionalPredicates) = {
val functionEnc = new FunctionEncoder (true, false)
val predicates = for (f <- functions) yield (functionEnc addFunction f)
val allPredicates =
List(seq_prefix_of, seq_suffix_of, seq_subseq_of, re_member) ::: predicates
(allPredicates,
functions zip predicates,
predicates.toSet)
}
val axioms = Conjunction.TRUE
val totalityAxioms = Conjunction.TRUE
val predicateMatchConfig : Signature.PredicateMatchConfig = Map()
val triggerRelevantFunctions : Set[IFunction] = functions.toSet
def plugin = None
TheoryRegistry register this
}
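// Illustrative sketch (not part of the original source): building terms over
// the theory's symbols with Princess' ap.parser AST (IFunApp(fun, args)).
// The particular term constructed here is an arbitrary example.
object SMTLIBStringTheoryExample {
  import ap.parser.{IFunApp, ITerm}
  // length(seq_empty()) as a term tree
  val emptySeq: ITerm = IFunApp(SMTLIBStringTheory.seq_empty, List())
  val emptyLen: ITerm = IFunApp(SMTLIBStringTheory.seq_length, List(emptySeq))
}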
| uuverifiers/sloth | src/main/scala/SMTLIBStringTheory.scala | Scala | gpl-3.0 | 4,711 |
/*
* Copyright (C) 2016 University of Basel, Graphics and Vision Research Group
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package scalismo.ui.control
import scalismo.geometry.{Point, Point3D}
import scalismo.ui.control.SlicingPosition.event
import scalismo.ui.control.SlicingPosition.renderable.BoundingBoxRenderable
import scalismo.ui.event.{Event, ScalismoPublisher}
import scalismo.ui.model.{BoundingBox, Renderable, Scene}
import scalismo.ui.settings.GlobalSettings
import scalismo.ui.view._
object SlicingPosition {
object event {
case class VisibilityChanged(source: SlicingPosition) extends Event
case class PointChanged(source: SlicingPosition, previous: Point3D, current: Point3D) extends Event
case class BoundingBoxChanged(source: SlicingPosition) extends Event
case class PerspectiveChanged(source: SlicingPosition) extends Event
}
object renderable {
class BoundingBoxRenderable(val source: SlicingPosition) extends Renderable
}
}
class SlicingPosition(val scene: Scene, val frame: ScalismoFrame) extends ScalismoPublisher {
private var _visible = GlobalSettings.get[Boolean](GlobalSettings.Keys.SlicingPositionShow).getOrElse(false)
def visible: Boolean = _visible
def visible_=(newVisible: Boolean): Unit = {
if (_visible != newVisible) {
_visible = newVisible
GlobalSettings.set(GlobalSettings.Keys.SlicingPositionShow, newVisible)
publishEvent(event.VisibilityChanged(this))
//scene.publishEdt(Scene.SlicingPosition.SlicesVisibleChanged(this))
//scene.publishEdt(Scene.VisibilityChanged(scene))
}
}
private var _point: Point3D = Point3D(0, 0, 0)
def point: Point3D = {
_point
}
def point_=(np: Point3D): Unit = {
if (_point != np) {
val prev = _point
_point = np
publishEvent(event.PointChanged(this, prev, np))
}
}
def x: Double = point(0)
def y: Double = point(1)
def z: Double = point(2)
def x_=(nv: Float): Unit = {
val sv = Math.min(Math.max(boundingBox.xMin, nv), boundingBox.xMax)
if (x != sv) {
point_=(Point(sv, y, z))
}
}
def y_=(nv: Float): Unit = {
val sv = Math.min(Math.max(boundingBox.yMin, nv), boundingBox.yMax)
if (y != sv) {
point = Point(x, sv, z)
}
}
def z_=(nv: Float): Unit = {
val sv = Math.min(Math.max(boundingBox.zMin, nv), boundingBox.zMax)
if (z != sv) {
point = Point(x, y, sv)
}
}
private def sanitizePoint(): Unit = {
val sx = Math.min(Math.max(boundingBox.xMin, x), boundingBox.xMax)
val sy = Math.min(Math.max(boundingBox.yMin, y), boundingBox.yMax)
val sz = Math.min(Math.max(boundingBox.zMin, z), boundingBox.zMax)
point = Point(sx, sy, sz)
}
private var _boundingBox: BoundingBox = BoundingBox.Invalid
def boundingBox: BoundingBox = _boundingBox
private def boundingBox_=(nb: BoundingBox): Unit = {
if (_boundingBox != nb) {
val wasInvalid = _boundingBox == BoundingBox.Invalid
_boundingBox = nb
publishEvent(event.BoundingBoxChanged(this))
if (wasInvalid) center()
sanitizePoint()
}
}
def viewports: List[ViewportPanel] = frame.perspective.viewports
private def updateBoundingBox(): Unit = {
boundingBox = viewports.foldLeft(BoundingBox.Invalid: BoundingBox)({
case (bb, vp) =>
bb.union(vp.currentBoundingBox)
})
}
private def perspectiveChanged(): Unit = {
viewports.foreach(vp => listenTo(vp))
updateBoundingBox()
publishEvent(event.PerspectiveChanged(this))
}
def center(): Unit = {
point = boundingBox.center
}
reactions += {
case PerspectivePanel.event.PerspectiveChanged(_, _, _) => perspectiveChanged()
case ViewportPanel.event.BoundingBoxChanged(_) => updateBoundingBox()
case ViewportPanel.event.Detached(vp) => deafTo(vp)
}
def initialize(): Unit = {
listenTo(frame.perspective)
perspectiveChanged()
}
// renderables
private lazy val boundingBoxRenderable = new BoundingBoxRenderable(this)
def renderablesFor(viewport: ViewportPanel): List[Renderable] = {
viewport match {
case _: ViewportPanel3D => List(boundingBoxRenderable)
case _: ViewportPanel2D => List(boundingBoxRenderable)
}
}
}
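// Hypothetical sketch (not part of the original source): observing the events
// published above. This assumes ScalismoPublisher participates in
// scala.swing's Publisher/Reactor mechanism, as the `reactions +=` block in
// SlicingPosition itself suggests.
import scala.swing.Reactor

class SlicingPositionLogger(sp: SlicingPosition) extends Reactor {
  listenTo(sp)
  reactions += {
    case SlicingPosition.event.PointChanged(_, previous, current) =>
      println(s"slicing point moved: $previous -> $current")
    case SlicingPosition.event.BoundingBoxChanged(source) =>
      println(s"bounding box is now ${source.boundingBox}")
  }
}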
| unibas-gravis/scalismo-ui | src/main/scala/scalismo/ui/control/SlicingPosition.scala | Scala | gpl-3.0 | 4,893 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest
import java.io.File
import SharedHelpers.{createTempDirectory, thisLineNumber}
import enablers.Existence
import org.scalactic.Prettifier
import org.scalatest.funspec.AnyFunSpec
import org.scalatest.matchers.should.Matchers._
class ShouldExistLogicalOrImplicitSpec extends AnyFunSpec {
private val prettifier = Prettifier.default
trait Thing {
def exist: Boolean
}
val something = new Thing {
val exist = true
}
val nothing = new Thing {
val exist = false
}
implicit def existenceOfThing[T <: Thing]: Existence[T] =
new Existence[T] {
def exists(thing: T): Boolean = thing.exist
}
val fileName = "ShouldExistLogicalOrImplicitSpec.scala"
def doesNotExist(left: Any): String =
FailureMessages.doesNotExist(prettifier, left)
def exists(left: Any): String =
FailureMessages.exists(prettifier, left)
def wasEqualTo(left: Any, right: Any): String =
FailureMessages.wasEqualTo(prettifier, left, right)
def wasNotEqualTo(left: Any, right: Any): String =
FailureMessages.wasNotEqualTo(prettifier, left, right)
def equaled(left: Any, right: Any): String =
FailureMessages.equaled(prettifier, left, right)
def didNotEqual(left: Any, right: Any): String =
FailureMessages.didNotEqual(prettifier, left, right)
def allError(left: Any, message: String, lineNumber: Int): String = {
val messageWithIndex = UnquotedString(" " + FailureMessages.forAssertionsGenTraversableMessageWithStackDepth(prettifier, 0, UnquotedString(message), UnquotedString(fileName + ":" + lineNumber)))
FailureMessages.allShorthandFailed(prettifier, messageWithIndex, left)
}
describe("The exist syntax when used with File") {
it("should do nothing when the file exists") {
something should (equal (something) or exist)
something should (equal (nothing) or exist)
nothing should (equal (nothing) or exist)
something should (exist or equal (something))
nothing should (exist or equal (nothing))
something should (exist or equal (nothing))
something should (be (something) or exist)
something should (be (nothing) or exist)
nothing should (be (nothing) or exist)
something should (exist or be (something))
nothing should (exist or be (nothing))
something should (exist or be (nothing))
}
it("should throw TFE with correct stack depth and message when the file does not exist") {
val e1 = intercept[exceptions.TestFailedException] {
nothing should (equal (something) or exist)
}
assert(e1.message === Some(didNotEqual(nothing, something) + ", and " + doesNotExist(nothing)))
assert(e1.failedCodeFileName === Some(fileName))
assert(e1.failedCodeLineNumber === Some(thisLineNumber - 4))
val e2 = intercept[exceptions.TestFailedException] {
nothing should (exist or equal (something))
}
assert(e2.message === Some(doesNotExist(nothing) + ", and " + didNotEqual(nothing, something)))
assert(e2.failedCodeFileName === Some(fileName))
assert(e2.failedCodeLineNumber === Some(thisLineNumber - 4))
val e3 = intercept[exceptions.TestFailedException] {
nothing should (be (something) or exist)
}
assert(e3.message === Some(wasNotEqualTo(nothing, something) + ", and " + doesNotExist(nothing)))
assert(e3.failedCodeFileName === Some(fileName))
assert(e3.failedCodeLineNumber === Some(thisLineNumber - 4))
val e4 = intercept[exceptions.TestFailedException] {
nothing should (exist or be (something))
}
assert(e4.message === Some(doesNotExist(nothing) + ", and " + wasNotEqualTo(nothing, something)))
assert(e4.failedCodeFileName === Some(fileName))
assert(e4.failedCodeLineNumber === Some(thisLineNumber - 4))
}
it("should do nothing when it is used with not and the file does not exists") {
nothing should (equal (nothing) or not (exist))
nothing should (equal (something) or not (exist))
something should (equal (something) or not (exist))
nothing should (not (exist) or equal (nothing))
something should (not (exist) or equal (something))
nothing should (not (exist) or equal (something))
nothing should (be (nothing) or not (exist))
nothing should (be (something) or not (exist))
something should (be (something) or not (exist))
nothing should (not (exist) or be (nothing))
something should (not (exist) or be (something))
nothing should (not (exist) or be (something))
}
it("should throw TFE with correct stack depth and message when it is used with not and the file exists") {
val e1 = intercept[exceptions.TestFailedException] {
something should (equal (nothing) or not (exist))
}
assert(e1.message === Some(didNotEqual(something, nothing) + ", and " + exists(something)))
assert(e1.failedCodeFileName === Some(fileName))
assert(e1.failedCodeLineNumber === Some(thisLineNumber - 4))
val e2 = intercept[exceptions.TestFailedException] {
something should (not (exist) or equal (nothing))
}
assert(e2.message === Some(exists(something) + ", and " + didNotEqual(something, nothing)))
assert(e2.failedCodeFileName === Some(fileName))
assert(e2.failedCodeLineNumber === Some(thisLineNumber - 4))
val e3 = intercept[exceptions.TestFailedException] {
something should (be (nothing) or not (exist))
}
assert(e3.message === Some(wasNotEqualTo(something, nothing) + ", and " + exists(something)))
assert(e3.failedCodeFileName === Some(fileName))
assert(e3.failedCodeLineNumber === Some(thisLineNumber - 4))
val e4 = intercept[exceptions.TestFailedException] {
something should (not (exist) or be (nothing))
}
assert(e4.message === Some(exists(something) + ", and " + wasNotEqualTo(something, nothing)))
assert(e4.failedCodeFileName === Some(fileName))
assert(e4.failedCodeLineNumber === Some(thisLineNumber - 4))
}
}
describe("The exist syntax when used with all(xs)") {
it("should do nothing when the file exists") {
all(List(something)) should (equal (something) or exist)
all(List(something)) should (equal (nothing) or exist)
all(List(nothing)) should (equal (nothing) or exist)
all(List(something)) should (exist or equal (something))
all(List(nothing)) should (exist or equal (nothing))
all(List(something)) should (exist or equal (nothing))
all(List(something)) should (be (something) or exist)
all(List(something)) should (be (nothing) or exist)
all(List(nothing)) should (be (nothing) or exist)
all(List(something)) should (exist or be (something))
all(List(nothing)) should (exist or be (nothing))
all(List(something)) should (exist or be (nothing))
}
it("should throw TFE with correct stack depth and message when the file does not exist") {
val left1 = List(nothing)
val e1 = intercept[exceptions.TestFailedException] {
all(left1) should (equal (something) or exist)
}
assert(e1.message === Some(allError(left1, didNotEqual(nothing, something) + ", and " + doesNotExist(nothing), thisLineNumber - 2)))
assert(e1.failedCodeFileName === Some(fileName))
assert(e1.failedCodeLineNumber === Some(thisLineNumber - 4))
val left2 = List(nothing)
val e2 = intercept[exceptions.TestFailedException] {
all(left2) should (exist or equal (something))
}
assert(e2.message === Some(allError(left2, doesNotExist(nothing) + ", and " + didNotEqual(nothing, something), thisLineNumber - 2)))
assert(e2.failedCodeFileName === Some(fileName))
assert(e2.failedCodeLineNumber === Some(thisLineNumber - 4))
val left3 = List(nothing)
val e3 = intercept[exceptions.TestFailedException] {
all(left3) should (be (something) or exist)
}
assert(e3.message === Some(allError(left3, wasNotEqualTo(nothing, something) + ", and " + doesNotExist(nothing), thisLineNumber - 2)))
assert(e3.failedCodeFileName === Some(fileName))
assert(e3.failedCodeLineNumber === Some(thisLineNumber - 4))
val left4 = List(nothing)
val e4 = intercept[exceptions.TestFailedException] {
all(left4) should (exist or be (something))
}
assert(e4.message === Some(allError(left4, doesNotExist(nothing) + ", and " + wasNotEqualTo(nothing, something), thisLineNumber - 2)))
assert(e4.failedCodeFileName === Some(fileName))
assert(e4.failedCodeLineNumber === Some(thisLineNumber - 4))
}
it("should do nothing when it is used with not and the file does not exists") {
all(List(nothing)) should (equal (nothing) or not (exist))
all(List(nothing)) should (equal (something) or not (exist))
all(List(something)) should (equal (something) or not (exist))
all(List(nothing)) should (not (exist) or equal (nothing))
all(List(something)) should (not (exist) or equal (something))
all(List(nothing)) should (not (exist) or equal (something))
all(List(nothing)) should (be (nothing) or not (exist))
all(List(nothing)) should (be (something) or not (exist))
all(List(something)) should (be (something) or not (exist))
all(List(nothing)) should (not (exist) or be (nothing))
all(List(something)) should (not (exist) or be (something))
all(List(nothing)) should (not (exist) or be (something))
}
it("should throw TFE with correct stack depth and message when it is used with not and the file exists") {
val left1 = List(something)
val e1 = intercept[exceptions.TestFailedException] {
all(left1) should (equal (nothing) or not (exist))
}
assert(e1.message === Some(allError(left1, didNotEqual(something, nothing) + ", and " + exists(something), thisLineNumber - 2)))
assert(e1.failedCodeFileName === Some(fileName))
assert(e1.failedCodeLineNumber === Some(thisLineNumber - 4))
val left2 = List(something)
val e2 = intercept[exceptions.TestFailedException] {
all(left2) should (not (exist) or equal (nothing))
}
assert(e2.message === Some(allError(left2, exists(something) + ", and " + didNotEqual(something, nothing), thisLineNumber - 2)))
assert(e2.failedCodeFileName === Some(fileName))
assert(e2.failedCodeLineNumber === Some(thisLineNumber - 4))
val left3 = List(something)
val e3 = intercept[exceptions.TestFailedException] {
all(left3) should (be (nothing) or not (exist))
}
assert(e3.message === Some(allError(left3, wasNotEqualTo(something, nothing) + ", and " + exists(something), thisLineNumber - 2)))
assert(e3.failedCodeFileName === Some(fileName))
assert(e3.failedCodeLineNumber === Some(thisLineNumber - 4))
val left4 = List(something)
val e4 = intercept[exceptions.TestFailedException] {
all(left4) should (not (exist) or be (nothing))
}
assert(e4.message === Some(allError(left4, exists(something) + ", and " + wasNotEqualTo(something, nothing), thisLineNumber - 2)))
assert(e4.failedCodeFileName === Some(fileName))
assert(e4.failedCodeLineNumber === Some(thisLineNumber - 4))
}
}
} | scalatest/scalatest | jvm/scalatest-test/src/test/scala/org/scalatest/ShouldExistLogicalOrImplicitSpec.scala | Scala | apache-2.0 | 12,178 |
package com.ldaniels528.broadway.core.resources
/**
* Represents a iterable resource
* @author Lawrence Daniels <[email protected]>
*/
trait IterableResource[T] extends Resource {
def iterator: Iterator[T]
}
| ldaniels528/shocktrade-broadway-server | src/main/scala/com/ldaniels528/broadway/core/resources/IterableResource.scala | Scala | apache-2.0 | 225 |
package models
import conversions._
import org.squeryl.PrimitiveTypeMode._
import org.squeryl.dsl.ast.{BinaryOperatorNodeLogicalBoolean, LogicalBoolean}
import collins.solr._
import IpmiInfo.Enum._
import util.views.Formatter
import java.util.Date
case class AssetFinder(
tag: Option[String],
status: Option[Status],
assetType: Option[AssetType],
createdAfter: Option[Date],
createdBefore: Option[Date],
updatedAfter: Option[Date],
updatedBefore: Option[Date],
state: Option[State],
query: Option[SolrExpression])
{
def asLogicalBoolean(a: Asset): LogicalBoolean = {
val tagBool = tag.map((a.tag === _))
val statusBool = status.map((a.status === _.id))
val typeBool = assetType.map((a.asset_type === _.id))
val createdAfterTs = createdAfter.map((a.created gte _.asTimestamp))
val createdBeforeTs = createdBefore.map((a.created lte _.asTimestamp))
val updatedAfterTs = Some((a.updated gte updatedAfter.map(_.asTimestamp).?))
val updatedBeforeTs = Some((a.updated lte updatedBefore.map(_.asTimestamp).?))
val stateBool = state.map((a.state === _.id))
val ops = Seq(tagBool, statusBool, typeBool, createdAfterTs, createdBeforeTs, updatedAfterTs,
updatedBeforeTs, stateBool).filter(_ != None).map(_.get)
ops.reduceRight((a,b) => new BinaryOperatorNodeLogicalBoolean(a, b, "and"))
}
/**
* converts the finder into a sequence of key/value tuples, used as part of forwarding searches
* to remote collins instances (see RemoteAssetFinder for why it's not a map)
*/
def toSeq: Seq[(String, String)] = {
val items:Seq[Option[(String, String)]] = (
tag.map("tag" -> _) ::
status.map("status" -> _.name) ::
assetType.map("type" -> _.name) ::
createdAfter.map(t => "createdAfter" -> Formatter.dateFormat(t)) ::
createdBefore.map(t => "createdBefore" -> Formatter.dateFormat(t)) ::
updatedAfter.map(t => "updatedAfter" -> Formatter.dateFormat(t)) ::
updatedBefore.map(t => "updatedBefore" -> Formatter.dateFormat(t)) ::
state.map(s => "state" -> s.name) ::
query.map{q => "query" -> "UHOH!!!!"} :: //FIXME: need toCQL traversal
Nil
)
items.flatten
}
def toSolrKeyVals = {
val items = tag.map{t => SolrKeyVal("tag", StringValueFormat.createValueFor(t))} ::
status.map{t => SolrKeyVal("status" , SolrIntValue(t.id))} ::
assetType.map(t => SolrKeyVal("assetType" , SolrIntValue(t.id))) ::
state.map(t => SolrKeyVal("state", SolrIntValue(t.id))) ::
query ::
Nil
    val cOpt = (createdBefore.map(d => SolrStringValue(Formatter.solrDateFormat(d), StrictUnquoted)),
                createdAfter.map(d => SolrStringValue(Formatter.solrDateFormat(d), StrictUnquoted))) match {
      case (None, None) => None
      case (bOpt, aOpt) => Some(SolrKeyRange("created", aOpt, bOpt, true))
    }
    val uOpt = (updatedBefore.map(d => SolrStringValue(Formatter.solrDateFormat(d), StrictUnquoted)),
                updatedAfter.map(d => SolrStringValue(Formatter.solrDateFormat(d), StrictUnquoted))) match {
      case (None, None) => None
      case (bOpt, aOpt) => Some(SolrKeyRange("updated", aOpt, bOpt, true))
    }
(cOpt :: uOpt :: items).flatten
}
}
object AssetFinder {
val empty = AssetFinder(None,None,None,None,None,None,None,None,None)
}
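// Illustrative sketch (not part of the original source): the usual way to
// build a finder is to start from AssetFinder.empty and refine it via copy;
// here only the tag is constrained before serializing for a remote search.
object AssetFinderExample {
  def tagQuery(tag: String): Seq[(String, String)] =
    AssetFinder.empty.copy(tag = Some(tag)).toSeq // => Seq("tag" -> tag)
}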
| Shopify/collins | app/models/AssetFinder.scala | Scala | apache-2.0 | 3,300 |
/*============================================================================
This Chisel source file is part of a pre-release version of the HardFloat IEEE
Floating-Point Arithmetic Package, by John R. Hauser (with some contributions
from Yunsup Lee and Andrew Waterman, mainly concerning testing).
Copyright 2010, 2011, 2012, 2013, 2014, 2015, 2016, 2017 The Regents of the
University of California. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions, and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions, and the following disclaimer in the documentation
and/or other materials provided with the distribution.
3. Neither the name of the University nor the names of its contributors may
be used to endorse or promote products derived from this software without
specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS "AS IS", AND ANY
EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE, ARE
DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
=============================================================================*/
package types
import Chisel._
object equivRecFN
{
def apply(expWidth: Int, sigWidth: Int, a: UInt, b: UInt) =
{
val top4A = a(expWidth + sigWidth, expWidth + sigWidth - 3)
val top4B = b(expWidth + sigWidth, expWidth + sigWidth - 3)
Mux((top4A(2, 0) === UInt(0)) || (top4A(2, 0) === UInt(7)),
(top4A === top4B) && (a(sigWidth - 2, 0) === b(sigWidth - 2, 0)),
Mux((top4A(2, 0) === UInt(6)), (top4A === top4B), (a === b))
)
}
}
//*** CHANGE THIS NAME (HOW??):
object FMATest {
def main(args: Array[String]): Unit =
{
val testArgs = args.slice(1, args.length)
args(0) match {
case "f16FromRecF16" =>
chiselMain(testArgs, () => Module(new ValExec_f16FromRecF16))
case "f32FromRecF32" =>
chiselMain(testArgs, () => Module(new ValExec_f32FromRecF32))
case "f64FromRecF64" =>
chiselMain(testArgs, () => Module(new ValExec_f64FromRecF64))
case "UI32ToRecF16" =>
chiselMain(testArgs, () => Module(new ValExec_UI32ToRecF16))
case "UI32ToRecF32" =>
chiselMain(testArgs, () => Module(new ValExec_UI32ToRecF32))
case "UI32ToRecF64" =>
chiselMain(testArgs, () => Module(new ValExec_UI32ToRecF64))
case "UI64ToRecF16" =>
chiselMain(testArgs, () => Module(new ValExec_UI64ToRecF16))
case "UI64ToRecF32" =>
chiselMain(testArgs, () => Module(new ValExec_UI64ToRecF32))
case "UI64ToRecF64" =>
chiselMain(testArgs, () => Module(new ValExec_UI64ToRecF64))
case "I32ToRecF16" =>
chiselMain(testArgs, () => Module(new ValExec_I32ToRecF16))
case "I32ToRecF32" =>
chiselMain(testArgs, () => Module(new ValExec_I32ToRecF32))
case "I32ToRecF64" =>
chiselMain(testArgs, () => Module(new ValExec_I32ToRecF64))
case "I64ToRecF16" =>
chiselMain(testArgs, () => Module(new ValExec_I64ToRecF16))
case "I64ToRecF32" =>
chiselMain(testArgs, () => Module(new ValExec_I64ToRecF32))
case "I64ToRecF64" =>
chiselMain(testArgs, () => Module(new ValExec_I64ToRecF64))
case "RecF16ToUI32" =>
chiselMain(testArgs, () => Module(new ValExec_RecF16ToUI32))
case "RecF16ToUI64" =>
chiselMain(testArgs, () => Module(new ValExec_RecF16ToUI64))
case "RecF32ToUI32" =>
chiselMain(testArgs, () => Module(new ValExec_RecF32ToUI32))
case "RecF32ToUI64" =>
chiselMain(testArgs, () => Module(new ValExec_RecF32ToUI64))
case "RecF64ToUI32" =>
chiselMain(testArgs, () => Module(new ValExec_RecF64ToUI32))
case "RecF64ToUI64" =>
chiselMain(testArgs, () => Module(new ValExec_RecF64ToUI64))
case "RecF16ToI32" =>
chiselMain(testArgs, () => Module(new ValExec_RecF16ToI32))
case "RecF16ToI64" =>
chiselMain(testArgs, () => Module(new ValExec_RecF16ToI64))
case "RecF32ToI32" =>
chiselMain(testArgs, () => Module(new ValExec_RecF32ToI32))
case "RecF32ToI64" =>
chiselMain(testArgs, () => Module(new ValExec_RecF32ToI64))
case "RecF64ToI32" =>
chiselMain(testArgs, () => Module(new ValExec_RecF64ToI32))
case "RecF64ToI64" =>
chiselMain(testArgs, () => Module(new ValExec_RecF64ToI64))
case "RecF16ToRecF32" =>
chiselMain(testArgs, () => Module(new ValExec_RecF16ToRecF32))
case "RecF16ToRecF64" =>
chiselMain(testArgs, () => Module(new ValExec_RecF16ToRecF64))
case "RecF32ToRecF16" =>
chiselMain(testArgs, () => Module(new ValExec_RecF32ToRecF16))
case "RecF32ToRecF64" =>
chiselMain(testArgs, () => Module(new ValExec_RecF32ToRecF64))
case "RecF64ToRecF16" =>
chiselMain(testArgs, () => Module(new ValExec_RecF64ToRecF16))
case "RecF64ToRecF32" =>
chiselMain(testArgs, () => Module(new ValExec_RecF64ToRecF32))
case "MulAddRecF16_add" =>
chiselMain(
testArgs, () => Module(new ValExec_MulAddRecF16_add))
case "MulAddRecF16_mul" =>
chiselMain(
testArgs, () => Module(new ValExec_MulAddRecF16_mul))
case "MulAddRecF16" =>
chiselMain(testArgs, () => Module(new ValExec_MulAddRecF16))
case "MulAddRecF32_add" =>
chiselMain(
testArgs, () => Module(new ValExec_MulAddRecF32_add))
case "MulAddRecF32_mul" =>
chiselMain(
testArgs, () => Module(new ValExec_MulAddRecF32_mul))
case "MulAddRecF32" =>
chiselMain(testArgs, () => Module(new ValExec_MulAddRecF32))
case "MulAddRecF64_add" =>
chiselMain(
testArgs, () => Module(new ValExec_MulAddRecF64_add))
case "MulAddRecF64_mul" =>
chiselMain(
testArgs, () => Module(new ValExec_MulAddRecF64_mul))
case "MulAddRecF64" =>
chiselMain(testArgs, () => Module(new ValExec_MulAddRecF64))
case "DivSqrtRecF16_small_div" =>
chiselMain(
testArgs, () => Module(new ValExec_DivSqrtRecF16_small_div)
)
case "DivSqrtRecF16_small_sqrt" =>
chiselMain(
testArgs,
() => Module(new ValExec_DivSqrtRecF16_small_sqrt)
)
case "DivSqrtRecF32_small_div" =>
chiselMain(
testArgs, () => Module(new ValExec_DivSqrtRecF32_small_div)
)
case "DivSqrtRecF32_small_sqrt" =>
chiselMain(
testArgs,
() => Module(new ValExec_DivSqrtRecF32_small_sqrt)
)
case "DivSqrtRecF64_small_div" =>
chiselMain(
testArgs, () => Module(new ValExec_DivSqrtRecF64_small_div)
)
case "DivSqrtRecF64_small_sqrt" =>
chiselMain(
testArgs,
() => Module(new ValExec_DivSqrtRecF64_small_sqrt)
)
case "DivSqrtRecF64_div" =>
chiselMain(
testArgs, () => Module(new ValExec_DivSqrtRecF64_div))
case "DivSqrtRecF64_sqrt" =>
chiselMain(
testArgs, () => Module(new ValExec_DivSqrtRecF64_sqrt))
case "CompareRecF16_lt" =>
chiselMain(
testArgs, () => Module(new ValExec_CompareRecF16_lt))
case "CompareRecF16_le" =>
chiselMain(
testArgs, () => Module(new ValExec_CompareRecF16_le))
case "CompareRecF16_eq" =>
chiselMain(
testArgs, () => Module(new ValExec_CompareRecF16_eq))
case "CompareRecF32_lt" =>
chiselMain(
testArgs, () => Module(new ValExec_CompareRecF32_lt))
case "CompareRecF32_le" =>
chiselMain(
testArgs, () => Module(new ValExec_CompareRecF32_le))
case "CompareRecF32_eq" =>
chiselMain(
testArgs, () => Module(new ValExec_CompareRecF32_eq))
case "CompareRecF64_lt" =>
chiselMain(
testArgs, () => Module(new ValExec_CompareRecF64_lt))
case "CompareRecF64_le" =>
chiselMain(
testArgs, () => Module(new ValExec_CompareRecF64_le))
case "CompareRecF64_eq" =>
chiselMain(
testArgs, () => Module(new ValExec_CompareRecF64_eq))
}
}
}
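// Illustrative sketch (not part of the original source): driving the
// dispatcher above programmatically. Everything after the module name is
// forwarded to chiselMain; the Chisel2 flags shown ("--backend", "c",
// "--targetDir", ...) are assumptions about a typical invocation.
object FMATestExample {
  def main(args: Array[String]): Unit =
    FMATest.main(Array("MulAddRecF32", "--backend", "c", "--targetDir", "generated"))
}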
| stanford-ppl/spatial-lang | spatial/core/resources/chiselgen/template-level/templates/hardfloat/tests.scala | Scala | mit | 10,167 |
/*
* Copyright 2017 Exon IT
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package by.exonit.redmine.client.play26ws
import java.net.{URL, URLEncoder}
import akka.stream.{IOResult, Materializer}
import akka.stream.scaladsl.StreamConverters
import by.exonit.redmine.client.managers.WebClient
import by.exonit.redmine.client.managers.WebClient._
import cats.{~>, Id}
import monix.eval.Task
import play.api.libs.ws.{WSClient, WSRequest, WSResponse}
import scala.util.control.NonFatal
class Play26WSWebClient(val client: WSClient)
(implicit mat: Materializer)
extends WebClient {
import Implicits._
def compileRequestCommand(requestCommand: RequestDSL.Request[Unit]): WSRequest = {
var url: Option[String] = None
var queryParams: Seq[(String, String)] = Seq.empty
var headers: Map[String, String] = Map.empty
var method: String = "GET"
var auth: Option[RequestDSL.AuthenticationMethod] = None
var body: Option[RequestDSL.Body] = None
requestCommand.foldMap(new (RequestDSL.RequestOp ~> Id) {
override def apply[A](fa: RequestDSL.RequestOp[A]) = fa match {
case RequestDSL.SetUrl(u) =>
url = Some(u)
()
case RequestDSL.AddSegments(segments @ _*) =>
val encodedSegments = segments.map(URLEncoder.encode(_, "UTF-8"))
url = url.map {u =>
val baseUrl = new URL(u)
val newUrl = new URL(baseUrl, s"${baseUrl.getPath.trimRight('/')}/${encodedSegments.mkString("/")}")
newUrl.toExternalForm
}
()
case RequestDSL.AddQueries(queries @ _*) =>
queryParams ++= queries
()
case RequestDSL.SetHeaders(h @ _*) =>
headers ++= h
()
case RequestDSL.SetMethod(m) =>
method = m
()
case RequestDSL.SetAuth(a) =>
auth = Some(a)
()
case RequestDSL.SetBody(b) =>
b match {
case RequestDSL.Body.EmptyBody() =>
body = None
case bb =>
body = Some(bb)
}
()
case RequestDSL.NoOp() =>
()
}
})
val finalUrl = url.getOrElse(throw new UnsupportedOperationException("Unable to compile request from provided AST: no base URL specified"))
val baseRequest = client.url(finalUrl)
.withMethod(method)
.addHttpHeaders(headers.toSeq: _*)
.addQueryStringParameters(queryParams: _*)
val requestWithAuth = auth match {
case Some(x) => baseRequest.withDslAuth(x)
case None => baseRequest
}
body match {
case Some(b) => requestWithAuth.withDslBody(b)
case None => requestWithAuth
}
}
def compileResponseCommand[T](responseCommand: ResponseDSL.Response[T]): WSResponse => T = response => {
responseCommand.foldMap(new (ResponseDSL.ResponseOp ~> Id) {
override def apply[A](fa: ResponseDSL.ResponseOp[A]) = fa match {
case ResponseDSL.GetBodyAsBytes() =>
response.bodyAsBytes.toArray
case ResponseDSL.GetBodyAsString() =>
response.body
case ResponseDSL.GetStatusCode() =>
response.status
case ResponseDSL.GetStatusText() =>
response.statusText
case ResponseDSL.GetHeaders() =>
response.headers.mapValues(_.mkString(","))
}
})
}
def compileStreamingResponseCommand[T](
responseCommand: StreamingResponseDSL.StreamingResponse[T]
): WSResponse => T = response => {
responseCommand.foldMap(new (StreamingResponseDSL.StreamingResponseOp ~> Id) {
override def apply[A](fa: StreamingResponseDSL.StreamingResponseOp[A]) = fa match {
case StreamingResponseDSL.GetBodyStream(osp) =>
Task.deferFuture {
response.bodyAsSource.runWith(StreamConverters.fromOutputStream(osp))
} flatMap {
case r: IOResult if r.wasSuccessful => Task.unit
case r: IOResult => Task.raiseError(r.getError)
}
case StreamingResponseDSL.GetStatusCode() =>
response.status
case StreamingResponseDSL.GetHeaders() =>
response.headers.mapValues(_.mkString(","))
}
})
}
override def execute[T](requestCommand: RequestDSL.Request[Unit], responseCommand: ResponseDSL.Response[T]):
Task[T] =
Task.evalOnce {
compileRequestCommand(requestCommand)
} flatMap {req => Task.fromFuture(req.execute())} map {
compileResponseCommand(responseCommand)
}
override def executeStreaming[T](
requestCommand: RequestDSL.Request[Unit],
responseCommand: StreamingResponseDSL.StreamingResponse[T]
): Task[T] =
Task.evalOnce {
compileRequestCommand(requestCommand)
} flatMap {req => Task.fromFuture(req.stream())} map {
compileStreamingResponseCommand(responseCommand)
}
def close(): Unit = {
try {
client.close()
} catch {
case NonFatal(_) =>
}
}
}
| exon-it/redmine-scala-client | client-play26-ws/src/main/scala/by/exonit/redmine/client/play26ws/Play26WSWebClient.scala | Scala | apache-2.0 | 5,458 |
package org.jetbrains.plugins.scala.editor.folding
import com.intellij.openapi.util.TextRange
import org.jetbrains.plugins.scala.base.ScalaLightCodeInsightFixtureTestAdapter
import org.jetbrains.plugins.scala.lang.folding.ScalaFoldingBuilder
import org.jetbrains.plugins.scala.util.MultilineStringUtil
import scala.collection.mutable
import scala.collection.mutable.ArrayBuffer
/**
* User: Dmitry.Naydanov
* Date: 14.08.15.
*/
class ScalaEditorFoldingTest extends ScalaLightCodeInsightFixtureTestAdapter {
private val FOLD_START_MARKER = "<|fold>"
private val FOLD_END_MARKER = "</fold>"
private val FOLD_MARKER_LENGTH = FOLD_START_MARKER.length
private[this] val ST = FOLD_START_MARKER
private[this] val END = FOLD_END_MARKER
private def genericCheckRegions(fileText: String) {
val myRegions = new ArrayBuffer[TextRange]()
val myFileText = new StringBuilder(fileText.length)
val myOpenMarkers = mutable.Stack[Int]()
var i1 = fileText indexOf FOLD_START_MARKER
var i2 = fileText indexOf FOLD_END_MARKER
var overallFixOffset = 0
@inline def increaseOverall(): Unit = overallFixOffset += FOLD_MARKER_LENGTH
@inline def appendPair(errorPlaceMsg: String) {
assert(myOpenMarkers.nonEmpty, "Unbalanced fold markers " + errorPlaceMsg)
val st = myOpenMarkers.pop()
myRegions += new TextRange(st, i2 - overallFixOffset)
}
assert(i1 > -1 && i2 > -1, s"Bad fold markers: $i1 and $i2")
myFileText append fileText.substring(0, i1)
while (i1 > -1 || i2 > -1) {
if (i2 < i1 && i2 > -1) {
appendPair("#1")
val i2Old = i2
i2 = fileText.indexOf(FOLD_END_MARKER, i2Old + 1)
myFileText append fileText.substring(i2Old + FOLD_MARKER_LENGTH, if (i2 > 0) Math.min(i2, i1) else i1)
increaseOverall()
} else if (i1 < i2 && i1 > -1) {
myOpenMarkers.push(i1 - overallFixOffset)
increaseOverall()
val i1Old = i1
i1 = fileText.indexOf(FOLD_START_MARKER, i1Old + 1)
myFileText append fileText.substring(i1Old + FOLD_MARKER_LENGTH, if (i1 > -1) Math.min(i2, i1) else i2)
} else if (i1 < i2) { //i1 == -1
appendPair("#1.5")
increaseOverall()
val i2Old = i2
i2 = fileText.indexOf(FOLD_END_MARKER, i2Old + 1)
myFileText.append (
if (i2 == -1) fileText.substring(i2Old + FOLD_MARKER_LENGTH) else fileText.substring(i2Old + FOLD_MARKER_LENGTH, i2)
)
} else assert(assertion = false, "Unbalanced fold markers #2")
}
assert(myOpenMarkers.isEmpty, s"Unbalanced fold markers #3: ${myOpenMarkers.mkString}")
val assumedRegionRanges = myRegions.result().sortWith((x, y) => x.getStartOffset < y.getStartOffset)
myFixture.configureByText("dummy.scala", myFileText.result())
val myBuilder = new ScalaFoldingBuilder
val regions = myBuilder.buildFoldRegions(myFixture.getFile.getNode, myFixture getDocument myFixture.getFile)
assert(regions.length == assumedRegionRanges.size, s"Different region count, expected: ${assumedRegionRanges.size}, but got: ${regions.length}")
(regions zip assumedRegionRanges).zipWithIndex foreach {
case ((region, assumedRange), idx) =>
assert(region.getRange.getStartOffset == assumedRange.getStartOffset,
s"Different start offsets in region #$idx : expected ${assumedRange.getStartOffset}, but got ${region.getRange.getStartOffset}")
assert(region.getRange.getEndOffset == assumedRange.getEndOffset,
s"Different end offsets in region #$idx : expected ${assumedRange.getEndOffset}, but got ${region.getRange.getEndOffset}")
}
}
def testNested() {
val text =
s""" class A $ST{
| 1 match $ST{
| case 1 => $ST{
| //azaza
| }$END
| }$END
|
| object Azazaible $ST{
| for (i <- 1 to 10) $ST{
| println("azaza!")
| }$END
| }$END
|
| def boo() $ST{
| if (true) $ST{
| //azaza
| }$END
| }$END
 | }$END
      """.stripMargin.replace("\r", "")
genericCheckRegions(text)
}
def testMatchBody() {
val text =
s"""
| 1 match $ST{
| case 1 =>
 | }$END
      """.stripMargin.replace("\r", "")
genericCheckRegions(text)
}
def testClassBody() {
val text =
s"""
| class A $ST{
| //azaza
 | }$END
      """.stripMargin.replace("\r", "")
genericCheckRegions(text)
}
def testMethodBody() {
val text =
s"""
| def boo() $ST{
|
 | }$END
      """.stripMargin.replace("\r", "")
genericCheckRegions(text)
}
def testIfBody() {
val text =
s"""
| if (true) $ST{
| println("")
 | }$END
      """.stripMargin.replace("\r", "")
genericCheckRegions(text)
}
def testMatchInner() {
val text =
s"""
|1 match $ST{
| case 1 => $ST{
|
| }$END
 | }$END
      """.stripMargin.replace("\r", "")
genericCheckRegions(text)
}
def testLambdaArgs() {
val text =
s"""
| def foo(i: Int => Int, j: Int) = i(j)
|
| foo$ST(
| jj => jj + 1, 123
 | )$END
      """.stripMargin.replace("\r", "")
genericCheckRegions(text)
}
def testSelectorImport() {
val text =
s"""
| import ${ST}scala.collection.mutable.{
| AbstractSeq, ArrayOps, Buffer
| }$END
|
| class A $ST{
|
 | }$END
      """.stripMargin.replace("\r", "")
genericCheckRegions(text)
}
def testBlockComment() {
val text =
s"""
| $ST/*
| * Marker trait
| */$END
 | trait MyMarker
      """.stripMargin.replace("\r", "")
genericCheckRegions(text)
}
def testDocComment() {
val text =
s"""
| $ST/**
| * Marker trait
| */$END
 | trait MyMarker
      """.stripMargin.replace("\r", "")
genericCheckRegions(text)
}
def testMlString() {
val text =
s"""
| val tratata =
| $ST${MultilineStringUtil.multilineQuotes}
| aaaaaa
| aaaaaa
| aaaaaa
| aaaaaa
 | ${MultilineStringUtil.multilineQuotes}$END
      """.stripMargin.replace("\r", "")
genericCheckRegions(text)
}
}
| whorbowicz/intellij-scala | test/org/jetbrains/plugins/scala/editor/folding/ScalaEditorFoldingTest.scala | Scala | apache-2.0 | 6,537 |
package ${package}
import better.files.File
import org.apache.commons.configuration.PropertiesConfiguration
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
import scala.collection.JavaConverters._
class DebugConfigSpec extends AnyFlatSpec with Matchers {
val configDir: File = File("src/main/assembly/dist/cfg")
val debugConfigDir: File = File("src/test/resources/debug-config")
"debug-config" should "contain the same files as src/main/assembly/dist/cfg" in {
val filesInDebugConfig = debugConfigDir.list.toSet
val filesInDistCfg = configDir.list.toSet
filesInDebugConfig.map(_.name) shouldBe filesInDistCfg.map(_.name)
}
it should "contain an application.properties with the same keys as the one in src/main/assembly/dist/cfg" in {
val propsInDebugConfig = new PropertiesConfiguration((debugConfigDir / "application.properties").toJava)
val propsInDistCfg = new PropertiesConfiguration((configDir / "application.properties").toJava)
propsInDebugConfig.getKeys.asScala.toSet shouldBe propsInDistCfg.getKeys.asScala.toSet
}
}
| DANS-KNAW/easy-module-archetype | src/main/resources/archetype-resources/src/test/scala/DebugConfigSpec.scala | Scala | apache-2.0 | 1,110 |
package com.github.akiomik.leap_scala
import com.leapmotion.leap.Config
object StaticConfig extends StaticConfig
trait StaticConfig {
type ValueType = Config.ValueType
val ValueType = StaticValueType
}
| akiomik/leap-scala | src/main/scala/com/github/akiomik/leap_scala/StaticConfig.scala | Scala | mit | 211 |
package reactivemongo.api
import scala.language.higherKinds
import scala.util.Try
import reactivemongo.bson.buffer.{ ReadableBuffer, WritableBuffer }
trait SerializationPack { self: Singleton =>
type Value
type ElementProducer
type Document <: Value
type Writer[A]
type Reader[A]
type NarrowValueReader[A]
private[reactivemongo]type WidenValueReader[A]
def IdentityWriter: Writer[Document]
def IdentityReader: Reader[Document]
def serialize[A](a: A, writer: Writer[A]): Document
def deserialize[A](document: Document, reader: Reader[A]): A
def writeToBuffer(buffer: WritableBuffer, document: Document): WritableBuffer
def readFromBuffer(buffer: ReadableBuffer): Document
def serializeAndWrite[A](buffer: WritableBuffer, document: A, writer: Writer[A]): WritableBuffer = writeToBuffer(buffer, serialize(document, writer))
def readAndDeserialize[A](buffer: ReadableBuffer, reader: Reader[A]): A =
deserialize(readFromBuffer(buffer), reader)
import reactivemongo.core.protocol.Response
import reactivemongo.core.netty.ChannelBufferReadableBuffer
final def readAndDeserialize[A](response: Response, reader: Reader[A]): A = {
val buf = response.documents
val channelBuf = ChannelBufferReadableBuffer(buf.readBytes(buf.getInt(buf.readerIndex)))
readAndDeserialize(channelBuf, reader)
}
def writer[A](f: A => Document): Writer[A]
def isEmpty(document: Document): Boolean
def widenReader[T](r: NarrowValueReader[T]): WidenValueReader[T]
def readValue[A](value: Value, reader: WidenValueReader[A]): Try[A]
}
/** The default serialization pack. */
object BSONSerializationPack extends SerializationPack {
import reactivemongo.bson._, buffer.DefaultBufferHandler
type Value = BSONValue
type ElementProducer = Producer[BSONElement]
type Document = BSONDocument
type Writer[A] = BSONDocumentWriter[A]
type Reader[A] = BSONDocumentReader[A]
type NarrowValueReader[A] = BSONReader[_ <: BSONValue, A]
private[reactivemongo]type WidenValueReader[A] = UnsafeBSONReader[A]
object IdentityReader extends Reader[Document] {
def read(document: Document): Document = document
}
object IdentityWriter extends Writer[Document] {
def write(document: Document): Document = document
}
def serialize[A](a: A, writer: Writer[A]): Document = writer.write(a)
def deserialize[A](document: Document, reader: Reader[A]): A =
reader.read(document)
def writeToBuffer(buffer: WritableBuffer, document: Document): WritableBuffer = DefaultBufferHandler.writeDocument(document, buffer)
def readFromBuffer(buffer: ReadableBuffer): Document =
DefaultBufferHandler.readDocument(buffer).get
def writer[A](f: A => Document): Writer[A] = new BSONDocumentWriter[A] {
def write(input: A): Document = f(input)
}
def isEmpty(document: Document) = document.isEmpty
def widenReader[T](r: NarrowValueReader[T]): WidenValueReader[T] =
r.widenReader
def readValue[A](value: Value, reader: WidenValueReader[A]): Try[A] =
reader.readTry(value)
}
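// Illustrative sketch (not part of the original source): defining a Writer
// through the `writer` helper above and serializing a value with it. The
// Person type is invented purely for the example.
object BSONSerializationPackExample {
  import reactivemongo.bson._
  final case class Person(name: String)

  val personWriter: BSONSerializationPack.Writer[Person] =
    BSONSerializationPack.writer(p => BSONDocument("name" -> p.name))

  val doc: BSONSerializationPack.Document =
    BSONSerializationPack.serialize(Person("Ada"), personWriter)
}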
| maxime-gautre/ReactiveMongo | driver/src/main/scala/api/serializationpack.scala | Scala | apache-2.0 | 3,047 |
package test.scala.systemTestingDSL
object HipTestCaseData {
val arr_sumOutput = """!!!Full processing file "hip/array/arr_sum.ss"
Parsing file "hip/array/arr_sum.ss" by default parser...
WARNING : parsing problem dom is neither a ranking function nor a relation nor a heap predicate
WARNING : parsing problem dom is neither a ranking function nor a relation nor a heap predicate
!!! processing primitives "["prelude.ss"]
Starting Omega.../usr/local/bin/oc
ERROR: at hip/array/arr_sum.ss_12:27_12:30
Message: TYPE ERROR 1 : Found NUM but expecting int
gather_type_info_b_formula: relation sumarray
Checking procedure sigmaleft$int[]~int~int... Omega Error Exp:Globals.Illegal_Prover_Format("Omega.omega_of_exp: array, bag or list constraint a'[j']")
Formula: !(v_bool_37_1000') & i'<=j' & j<=k & t<=i & dom(a,t,k) & j'=j & i'=i &
v_int_40_1078+1=j' & t=t_1065 & k=k_1066 & dom(a',t_1065,k_1066) &
t_1065<=i' & v_int_40_1078<=k_1066 &
sumarray(a',i',v_int_40_1078,v_int_40_998') & a=a' & t=ahalb & k=ahaub &
t_1065=ahalb & k_1066=ahaub & dom(a',ahalb,ahaub) & ahalb<=j' & j'<=ahaub &
res=a'[j']
!!! WARNING logtime exception:0.004
Procedure sigmaleft$int[]~int~int FAIL.(2)
Exception Globals.Illegal_Prover_Format("Omega.omega_of_exp: array, bag or list constraint a'[j']") Occurred!
Error(s) detected when checking procedure sigmaleft$int[]~int~int
Omega Error Exp:Globals.Illegal_Prover_Format("Omega.omega_of_exp: array, bag or list constraint a'[i']")
Formula: !(v_bool_27_1032') & i'<=j' & dom(a,i,j) & i<=j & j'=j & i'=i & a=a' &
i=ahalb & j=ahaub & dom(a',ahalb,ahaub) & ahalb<=i' & i'<=ahaub & res=a'[i']
!!! WARNING logtime exception:0.
Checking procedure sigmaright$int[]~int~int... Omega Error Exp:Globals.Illegal_Prover_Format("Omega.omega_of_exp: array, bag or list constraint a'[i']")
Formula: !(v_bool_27_1032') & i'<=j' & dom(a,i,j) & i<=j & j'=j & i'=i & a=a' &
i=ahalb & j=ahaub & dom(a',ahalb,ahaub) & ahalb<=i' & i'<=ahaub & res=a'[i']
!!! WARNING logtime exception:0.
Procedure sigmaright$int[]~int~int FAIL.(2)
Exception Globals.Illegal_Prover_Format("Omega.omega_of_exp: array, bag or list constraint a'[i']") Occurred!
Error(s) detected when checking procedure sigmaright$int[]~int~int
Checking procedure test$...
Procedure test$ SUCCESS.
Stop Omega... 62 invocations
2 false contexts at: ( (28,2) (28,9) )
!!! log(small):(0.041829,215)
Total verification time: 0.136008 second(s)
Time spent in main process: 0.100006 second(s)
Time spent in child processes: 0.036002 second(s)
"""
} | rohitmukherjee/High-Performance-DSLs | src/test/scala/systemTestingDSL/HipTestCaseData.scala | Scala | mit | 2,534 |
package scalan.meta
import com.typesafe.scalalogging.slf4j.StrictLogging
class BoilerplateTool extends StrictLogging {
val scalanTypeSynonyms = Map(
"Conv" -> "Converter"
)
lazy val scalanConfig = CodegenConfig(
name = "scalan",
srcPath = "../core/src/main/scala",
entityFiles = List(
"scalan/Converters.scala",
"scalan/Views.scala"
),
scalanTypeSynonyms,
    baseContextTrait = "", // empty means: do not declare
seqContextTrait = "",
stagedContextTrait = ""
)
val coreTypeSynonyms = scalanTypeSynonyms ++ Map(
"RThrow" -> "Throwable",
"Arr" -> "Array",
"MM" -> "MMap",
"IntRep" -> "Int",
"DoubleRep" -> "Double",
"BoolRep" -> "Boolean",
"UnitRep" -> "Unit",
"NothingRep" -> "Nothing",
"ByteRep" -> "Byte",
"ShortRep" -> "Short",
"CharRep" -> "Char",
"LongRep" -> "Long",
"FloatRep" -> "Float",
"DoubleRep" -> "Double",
"ROption" -> "SOption"
)
lazy val coreConfig = CodegenConfig(
name = "core",
srcPath = "../core/src/main/scala",
entityFiles = List(
"scalan/primitives/AbstractStrings.scala",
"scalan/util/Exceptions.scala"
),
coreTypeSynonyms,
baseContextTrait = "scalan.Scalan",
seqContextTrait = "scalan.ScalanStd",
stagedContextTrait = "scalan.ScalanExp"
)
val coreTestsTypeSynonyms = coreTypeSynonyms ++ Map(
"RSeg" -> "Segment",
"RMetaTest" -> "MetaTest"
)
lazy val coreTestsConfig = CodegenConfig(
name = "coretests",
srcPath = "../core/src/test/scala",
entityFiles = List(
"scalan/common/Segments.scala",
"scalan/common/Kinds.scala",
"scalan/common/MetaTests.scala"
),
coreTestsTypeSynonyms
)
val collectTypeSynonyms = coreTypeSynonyms ++ Map(
"Coll" -> "Collection", "PairColl" -> "PairCollection", "NColl" -> "NestedCollection"
)
lazy val collectionsConfig = CodegenConfig(
name = "collections",
srcPath = "../collections/src/main/scala",
entityFiles = List(
"scalan/collections/HashSets.scala"
, "scalan/collections/Seqs.scala"
, "scalan/collections/MultiMap.scala"
, "scalan/collections/BitSets.scala"
, "scalan/collections/Collections.scala"
),
collectTypeSynonyms
)
val laTypeSynonyms = collectTypeSynonyms ++ Map(
"Vec" -> "Vector", "Matr" -> "Matrix"
)
lazy val laConfig = CodegenConfig(
name = "la",
srcPath = "../linear-algebra/src/main/scala",
entityFiles = List(
"scalan/linalgebra/Vectors.scala"
, "scalan/linalgebra/Matrices.scala"
),
laTypeSynonyms
)
val eeTypeSynonyms = coreTypeSynonyms ++ Map(
"PS" -> "PSet", "Dist" -> "Distributed", "PA" -> "PArray", "NA" -> "NestedArray", "PM" -> "PMap",
"Vec" -> "Vector", "Matr" -> "Matrix"
)
lazy val eeConfig = CodegenConfig(
name = "ee",
srcPath = "../../scalan/src/main/scala",
entityFiles = List(
"scalan/trees/Trees.scala",
"scalan/math/Matrices.scala",
"scalan/math/Vectors.scala",
"scalan/collections/PSets.scala",
"scalan/dists/Dists.scala",
"scalan/parrays/PArrays.scala"
),
eeTypeSynonyms,
baseContextTrait = "scalan.ScalanEnterprise",
seqContextTrait = "scalan.ScalanEnterpriseStd",
stagedContextTrait = "scalan.ScalanEnterpriseExp"
)
val effectsTypeSynonyms = Map(
"RFree" -> "Free",
"RCoproduct" -> "Coproduct",
"RepInteract" -> "Interact",
"RepAuth" -> "Auth"
)
lazy val effectsConfig = CodegenConfig(
name = "effects",
srcPath = "../effects/src/test/scala/",
entityFiles = List(
"scalan/monads/IOs.scala",
"scalan/monads/Readers.scala",
"scalan/monads/States.scala",
"scalan/monads/FreeStates.scala",
"scalan/monads/FreeMs.scala",
"scalan/monads/Processes.scala",
"scalan/monads/Frees.scala",
"scalan/monads/Coproducts.scala",
"scalan/monads/Interactions.scala",
"scalan/monads/Auths.scala"
),
effectsTypeSynonyms
)
val graphTypeSynonyms = collectTypeSynonyms ++ Map("PG" -> "Graph", "REdge" -> "EdgeType")
lazy val graphConfig = CodegenConfig(
name = "graphs",
srcPath = "../graphs/src/main/scala",
entityFiles = List(
"scalan/graphs/Graphs.scala",
"scalan/graphs/Vertices.scala",
"scalan/graphs/Edges.scala",
"scalan/graphs/Fronts.scala"
),
graphTypeSynonyms
)
val structsTypeSynonyms = Map[String, String]()
lazy val structsConfig = CodegenConfig(
name = "structs",
srcPath = "../core/src/main/scala",
entityFiles = List(
// "scalan/primitives/Structs.scala",
"scalan/primitives/StructKeys.scala",
"scalan/primitives/StructItems.scala"
),
structsTypeSynonyms,
    baseContextTrait = "", // empty means: do not declare
seqContextTrait = "",
stagedContextTrait = ""
)
def getConfigs(args: Array[String]): Seq[CodegenConfig] =
args.flatMap { arg => configsMap.getOrElse(arg,
sys.error(s"Unknown codegen config $arg. Allowed values: ${configsMap.keySet.mkString(", ")}"))
}.distinct
val configsMap = Map(
"scalan" -> List(scalanConfig),
"structs" -> List(structsConfig),
"core" -> List(coreConfig),
"core-tests" -> List(coreTestsConfig),
"collections" -> List(collectionsConfig),
"la" -> List(laConfig),
"graphs" -> List(graphConfig),
"ee" -> List(eeConfig),
"effects" -> List(effectsConfig),
"lib-all" -> List(scalanConfig, coreConfig, coreTestsConfig, collectionsConfig, laConfig, graphConfig, effectsConfig, structsConfig),
"all" -> List(scalanConfig, coreConfig, coreTestsConfig, collectionsConfig, laConfig, graphConfig, effectsConfig, structsConfig)
)
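  // Invocation sketch: the tool resolves config names through configsMap, so a
  // run such as `BoilerplateToolRun.main(Array("core", "la"))` (e.g. via sbt's
  // runMain) generates boilerplate for coreConfig and laConfig; unknown names
  // fail fast with the sys.error above. The sbt wiring itself is assumed.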
def main(args: Array[String]) {
val configs = getConfigs(args)
if (configs.isEmpty) {
logger.warn("BoilerplateTool run without configs")
} else {
for (c <- configs) {
println(s"Processing ${c.srcPath}")
new EntityManagement(c).generateAll()
println(s"Ok\\n")
}
}
}
}
object BoilerplateToolRun extends BoilerplateTool {
}
| PCMNN/scalan-ce | meta/src/main/scala/scalan/meta/BoilerplateTool.scala | Scala | apache-2.0 | 6,130 |
/*
* Copyright (c) 2015 Daniel Higuero.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scala.examples.basics.visibility
import org.scala.examples.basics.visibility.other.ClassC
object Visibility{
/**
* Visibility main.
* @param args The arguments.
*/
def main(args: Array[String]) : Unit = {
val v = new Visibility
v.checkVisibility()
v.printPrivateValues()
}
}
/**
* Variable and method visibility.
*/
class Visibility {
/**
* Method to check the visibility.
*/
private def checkVisibility() : Unit = {
val a = new ClassA
val b = new ClassB
val c = new ClassC
println(a.publicVal)
println(b.publicVal)
println(c.publicVal)
}
/**
* Print the private values of ClassA, ClassB, and ClassC.
*/
private def printPrivateValues() : Unit = {
// TODO Add a new method in ClassA, ClassB, and ClassC to print the values.
}
}
| dhiguero/scala-exercises | src/main/scala/org/scala/examples/basics/visibility/Visibility.scala | Scala | apache-2.0 | 1,430 |
package com.twitter.finagle.http
import java.util.{Map => JMap, List => JList, Collections}
import scala.collection.JavaConverters._
object Uri {
/**
* Constructs a Uri from the Host header and path component of a Request.
*/
def fromRequest(req: Request): Uri = {
val uri = req.uri
uri.indexOf('?') match {
case -1 => new Uri(req.host, uri, None)
case n => new Uri(req.host, uri.substring(0, n), Some(uri.substring(n + 1, uri.length)))
}
}
}
/**
* Represents an immutable URI.
*/
final class Uri private (host: Option[String], path: String, query: Option[String]) {
def this(host: String, path: String, query: String) =
this(Some(host), path, Some(query))
def this(host: String, path: String) =
this(Some(host), path, None)
override def toString: String = {
val prefix = host.getOrElse("")
query match {
case Some(q) => s"$prefix$path?$q"
case None => s"$prefix$path"
}
}
/**
* Extracts the parameters from the query string and returns a ParamMap.
*/
def params: ParamMap = _params
private[this] val _params: ParamMap = {
val decoded: JMap[String, JList[String]] = query match {
case Some(q) => QueryParamDecoder.decodeParams(q)
case None => Collections.emptyMap[String, JList[String]]
}
val map: Map[String, Seq[String]] = decoded.asScala.toMap.mapValues(_.asScala)
new MapParamMap(map)
}
}
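// Example (sketch, not from the original sources): constructing a Uri directly
// and reading decoded query parameters; values below are illustrative.
//
//   val uri = new Uri("example.com", "/search", "q=scala&limit=10")
//   uri.toString        // "example.com/search?q=scala&limit=10"
//   uri.params.get("q") // Some("scala")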
| luciferous/finagle | finagle-base-http/src/main/scala/com/twitter/finagle/http/Uri.scala | Scala | apache-2.0 | 1,421 |
package com.cloudray.scalapress.folder.controller.admin
import com.cloudray.scalapress.section.{Section, SectionDao}
/** @author Stephen Samuel */
trait SectionSorting {
val sectionDao: SectionDao
def reorderSections(order: String, sections: Iterable[Section]) {
val ids = order.split("-")
if (ids.size == sections.size)
sections.foreach(section => {
val pos = ids.indexOf(section.id.toString)
section.position = pos
sectionDao.save(section)
})
}
}
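// Worked example (sketch): given sections with ids 7, 8 and 9 and the order
// string "9-7-8", reorderSections assigns position 0 to section 9, 1 to
// section 7 and 2 to section 8, saving each through sectionDao; a mismatched
// id count leaves every position untouched.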
| vidyacraghav/scalapress | src/main/scala/com/cloudray/scalapress/folder/controller/admin/SectionSorting.scala | Scala | apache-2.0 | 503 |
package nl.bridgeworks.akka.rete
sealed trait Op
case object Equals extends Op
case object LessThan extends Op
case object GreaterThan extends Op
sealed trait Expr
case class Simple(concept: String) extends Expr
case class ValueOp(concept: String, op: Op, value: AnyVal) extends Expr
//TODO is this the right way to have a common "concept" property in both child case classes?
sealed trait Fact {
val concept:String
}
case class ConceptOnly(concept: String) extends Fact {
override def toString:String = s"Fact($concept)"
}
case class ConceptWithValue(concept: String, value: AnyVal) extends Fact {
override def toString:String = s"Fact($concept){$value}"
}
case class Rule(id: String, lhs: Vector[Expr], rhs: Vector[Fact])
case class Assertion(facts: Vector[Fact], inferenceRunId: String) {
override def toString:String = s"Assertion($facts){$inferenceRunId}"
}
sealed trait Side
case object Left extends Side
case object Right extends Side
//in case a beta node is connected to a terminal node
case object NA extends Side
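// Construction sketch (not part of the original sources): a rule that fires
// when a "temperature" fact exceeds 30 and asserts a "hot" fact, plus an
// assertion that would match it. Names and values are illustrative.
//
//   val rule = Rule("r1",
//     lhs = Vector(ValueOp("temperature", GreaterThan, 30)),
//     rhs = Vector(ConceptOnly("hot")))
//   val assertion = Assertion(Vector(ConceptWithValue("temperature", 35)), "run-1")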
| bridgeworks-nl/scala-rete | src/main/scala/nl/bridgeworks/akka/rete/Fact.scala | Scala | mit | 1,037 |
package com.github.bluenote
import org.lwjgl.opengl.GL11
import org.lwjgl.opengl.GL20
import org.lwjgl.opengl.GL11
trait VertexData {
val rawData: Array[Float]
val primitiveType: Int
val floatsPerVertex: Int
val numVertices: Int
val strideInBytes: Int
def setVertexAttribArrayAndPointer(shader: Shader)
def ++(that: VertexData): VertexData
}
class VertexData3D_NC(val rawData: Array[Float], val primitiveType: Int = GL11.GL_TRIANGLES) extends VertexData {
val floatsPerVertex = 10
val numVertices = rawData.length / floatsPerVertex
val strideInBytes = floatsPerVertex * 4
assert(rawData.length % floatsPerVertex == 0)
def ++(that: VertexData): VertexData3D_NC = new VertexData3D_NC(this.rawData ++ that.rawData)
def setVertexAttribArrayAndPointer(shader: Shader) {
shader.vertexAttributes match {
case va: VertexAttributes with HasVrtxAttrPos3D with HasVrtxAttrNormal with HasVrtxAttrColor =>
GL20.glEnableVertexAttribArray(va.locPos3D)
GL20.glEnableVertexAttribArray(va.locNormal)
GL20.glEnableVertexAttribArray(va.locColor)
GlWrapper.checkGlError("after enabling vertex attrib array")
GL20.glVertexAttribPointer(va.locPos3D, 3, GL11.GL_FLOAT, false, strideInBytes, 0)
GL20.glVertexAttribPointer(va.locNormal, 3, GL11.GL_FLOAT, false, strideInBytes, 12)
GL20.glVertexAttribPointer(va.locColor, 4, GL11.GL_FLOAT, false, strideInBytes, 24)
GlWrapper.checkGlError("after setting the attrib pointers")
case _ => throw new Exception("Shader does not provide required vertex attributes")
}
}
/**
   * Generic modification of existing VNC data according to a transformation matrix.
   *
   * In general, transforming VNC data requires transforming both positions and normals.
   * Under non-uniform scaling the position and normal transformations differ, so the
   * most general form takes two transformation matrices, which is obviously annoying.
   * See below for alternatives.
*/
def transform(modMatrixPos: Mat4f, modMatrixNrm: Mat3f): VertexData3D_NC = {
val newVertexData = rawData.clone()
for (ii <- Range(0, numVertices)) {
val i = ii*floatsPerVertex
val pos = new Vec4f(rawData(i) , rawData(i+1), rawData(i+2), 1f)
val nrm = new Vec3f(rawData(i+3), rawData(i+4), rawData(i+5))
val newPos = modMatrixPos * pos
val newNrm = modMatrixNrm * nrm
newVertexData(i ) = newPos.x
newVertexData(i+1) = newPos.y
newVertexData(i+2) = newPos.z
newVertexData(i+3) = newNrm.x
newVertexData(i+4) = newNrm.y
newVertexData(i+5) = newNrm.z
}
return new VertexData3D_NC(newVertexData, primitiveType)
}
/**
   * This provides a simplified interface to the above transformation.
* The normal transformation matrix is calculated internally by taking the inverse transpose.
*/
def transform(modMatrixPos: Mat4f): VertexData3D_NC = {
val modMatrixNrm = Mat3f.createFromMat4f(modMatrixPos).inverse().transpose()
transform(modMatrixPos, modMatrixNrm)
}
/**
   * For transformations with uniform scale the above is overkill;
   * we can simply use the position transformation matrix for the normals as well.
*/
def transformSimple(modMatrixPos: Mat4f): VertexData3D_NC = {
transform(modMatrixPos, Mat3f.createFromMat4f(modMatrixPos))
}
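  // Usage sketch: `transformSimple` reuses the position matrix for normals and
  // is only safe for uniform scale; the one-argument `transform` derives the
  // inverse-transpose internally. Matrix values are illustrative.
  //
  //   val rotated = vertexData.transformSimple(Mat4f.rotate(90, 0, 1, 0))
  //   val moved   = vertexData.transform(Mat4f.translate(0, 1, 0))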
}
/**
* Collection of vertex data generators for a few basic shapes.
 * The returned VertexData is of type VertexData3D_NC.
*/
object VertexDataGen3D_NC {
// some shorthands to simplify notation
type V = Vec3f
case class C(r: Float, g: Float, b: Float, a: Float) {
def arr = Array(r, g, b, a)
}
/**
* Generic cube
*/
def cube(x1: Float, x2: Float, y1: Float, y2: Float, z1: Float, z2: Float, color: Color): VertexData3D_NC = {
val p1 = Vec3f(x1 min x2, y1 min y2, z1 max z2)
val p2 = Vec3f(x1 max x2, y1 min y2, z1 max z2)
val p3 = Vec3f(x1 min x2, y1 max y2, z1 max z2)
val p4 = Vec3f(x1 max x2, y1 max y2, z1 max z2)
val p5 = Vec3f(x1 min x2, y1 min y2, z1 min z2)
val p6 = Vec3f(x1 max x2, y1 min y2, z1 min z2)
val p7 = Vec3f(x1 min x2, y1 max y2, z1 min z2)
val p8 = Vec3f(x1 max x2, y1 max y2, z1 min z2)
val carr = color.toArr
val triangles =
// front face
p1.arr ++ Vec3f(0,0,+1).arr ++ carr ++ p2.arr ++ Vec3f(0,0,+1).arr ++ carr ++ p4.arr ++ Vec3f(0,0,+1).arr ++ carr ++
p4.arr ++ Vec3f(0,0,+1).arr ++ carr ++ p3.arr ++ Vec3f(0,0,+1).arr ++ carr ++ p1.arr ++ Vec3f(0,0,+1).arr ++ carr ++
// back face
p5.arr ++ Vec3f(0,0,-1).arr ++ carr ++ p7.arr ++ Vec3f(0,0,-1).arr ++ carr ++ p8.arr ++ Vec3f(0,0,-1).arr ++ carr ++
p8.arr ++ Vec3f(0,0,-1).arr ++ carr ++ p6.arr ++ Vec3f(0,0,-1).arr ++ carr ++ p5.arr ++ Vec3f(0,0,-1).arr ++ carr ++
// right face
p2.arr ++ Vec3f(+1,0,0).arr ++ carr ++ p6.arr ++ Vec3f(+1,0,0).arr ++ carr ++ p8.arr ++ Vec3f(+1,0,0).arr ++ carr ++
p8.arr ++ Vec3f(+1,0,0).arr ++ carr ++ p4.arr ++ Vec3f(+1,0,0).arr ++ carr ++ p2.arr ++ Vec3f(+1,0,0).arr ++ carr ++
// left face
p1.arr ++ Vec3f(-1,0,0).arr ++ carr ++ p3.arr ++ Vec3f(-1,0,0).arr ++ carr ++ p7.arr ++ Vec3f(-1,0,0).arr ++ carr ++
p7.arr ++ Vec3f(-1,0,0).arr ++ carr ++ p5.arr ++ Vec3f(-1,0,0).arr ++ carr ++ p1.arr ++ Vec3f(-1,0,0).arr ++ carr ++
// top face
p3.arr ++ Vec3f(0,+1,0).arr ++ carr ++ p4.arr ++ Vec3f(0,+1,0).arr ++ carr ++ p8.arr ++ Vec3f(0,+1,0).arr ++ carr ++
p8.arr ++ Vec3f(0,+1,0).arr ++ carr ++ p7.arr ++ Vec3f(0,+1,0).arr ++ carr ++ p3.arr ++ Vec3f(0,+1,0).arr ++ carr ++
// bottom face
p1.arr ++ Vec3f(0,-1,0).arr ++ carr ++ p5.arr ++ Vec3f(0,-1,0).arr ++ carr ++ p6.arr ++ Vec3f(0,-1,0).arr ++ carr ++
p6.arr ++ Vec3f(0,-1,0).arr ++ carr ++ p2.arr ++ Vec3f(0,-1,0).arr ++ carr ++ p1.arr ++ Vec3f(0,-1,0).arr ++ carr
return new VertexData3D_NC(triangles)
}
/**
* Generic cylinder
* Convention:
   * x/z corresponds to the rotation plane,
   * y corresponds to the cylinder axis (with top at +h and bottom at -h)
*
*/
def cylinder(r: Float, h: Float, color: Color, slices: Int = 4, wallOnly: Boolean = false): VertexData3D_NC = {
val carr = color.toArr
val circularIndices = Range(0, slices).toArray :+ 0 // eg 0,1,2,3,0
val circularSlidingTuples = circularIndices.sliding(2).map{ case Array(i,j) => (i,j)}.toArray // eg (0,1),(1,2),(2,3),(3,0)
// precalculate sin/cos values for all indices
val sinValues = circularIndices.map(i => r*math.sin(2*math.Pi * i / slices).toFloat)
val cosValues = circularIndices.map(i => r*math.cos(2*math.Pi * i / slices).toFloat)
// generate wall:
val wallTriangles = circularSlidingTuples.flatMap{ case (i,j) =>
val p1 = Vec3f(sinValues(i), -h, cosValues(i))
val p2 = Vec3f(sinValues(j), -h, cosValues(j))
val p3 = Vec3f(sinValues(j), +h, cosValues(j))
val p4 = Vec3f(sinValues(i), +h, cosValues(i))
val normalI = Vec3f(sinValues(i)/r, 0, cosValues(i)/r)
val normalJ = Vec3f(sinValues(j)/r, 0, cosValues(j)/r)
p1.arr ++ normalI.arr ++ carr ++ p2.arr ++ normalJ.arr ++ carr ++ p3.arr ++ normalJ.arr ++ carr ++
p3.arr ++ normalJ.arr ++ carr ++ p4.arr ++ normalI.arr ++ carr ++ p1.arr ++ normalI.arr ++ carr
}
if (wallOnly) {
return new VertexData3D_NC(wallTriangles)
}
// generate planes:
val planes = for ((y,n) <- List((-h, Vec3f(0,-1,0)), (+h, Vec3f(0,+1,0)))) yield {
val pc = Vec3f(0, y, 0)
val triangles = circularSlidingTuples.flatMap{ case (i,j) =>
val (ii, jj) = if (y > 0) (i,j) else (j,i) // change order depending on side
val p1 = Vec3f(sinValues(ii), y, cosValues(ii))
val p2 = Vec3f(sinValues(jj), y, cosValues(jj))
p1.arr ++ n.arr ++ carr ++ p2.arr ++ n.arr ++ carr ++ pc.arr ++ n.arr ++ carr
}
triangles
}
return new VertexData3D_NC(wallTriangles ++ planes(0) ++ planes(1))
}
def cylinderTwoColors(r: Float, h: Float, colorBottom: Color, colorTop: Color, slices: Int = 4, wallOnly: Boolean = false): VertexData3D_NC = {
val carrB = colorBottom.toArr
val carrT = colorTop.toArr
val circularIndices = Range(0, slices).toArray :+ 0 // eg 0,1,2,3,0
val circularSlidingTuples = circularIndices.sliding(2).map{ case Array(i,j) => (i,j)}.toArray // eg (0,1),(1,2),(2,3),(3,0)
// precalculate sin/cos values for all indices
val sinValues = circularIndices.map(i => r*math.sin(2*math.Pi * i / slices).toFloat)
val cosValues = circularIndices.map(i => r*math.cos(2*math.Pi * i / slices).toFloat)
// generate wall:
val wallTriangles = circularSlidingTuples.flatMap{ case (i,j) =>
val p1 = Vec3f(sinValues(i), -h, cosValues(i))
val p2 = Vec3f(sinValues(j), -h, cosValues(j))
val p3 = Vec3f(sinValues(j), +h, cosValues(j))
val p4 = Vec3f(sinValues(i), +h, cosValues(i))
val normalI = Vec3f(sinValues(i)/r, 0, cosValues(i)/r)
val normalJ = Vec3f(sinValues(j)/r, 0, cosValues(j)/r)
p1.arr ++ normalI.arr ++ carrB ++ p2.arr ++ normalJ.arr ++ carrB ++ p3.arr ++ normalJ.arr ++ carrT ++
p3.arr ++ normalJ.arr ++ carrT ++ p4.arr ++ normalI.arr ++ carrT ++ p1.arr ++ normalI.arr ++ carrB
}
if (wallOnly) {
return new VertexData3D_NC(wallTriangles)
}
// generate planes:
val planes = for ((y,n, carr) <- List((-h, Vec3f(0,-1,0), carrB), (+h, Vec3f(0,+1,0), carrT))) yield {
val pc = Vec3f(0, y, 0)
val triangles = circularSlidingTuples.flatMap{ case (i,j) =>
val (ii, jj) = if (y > 0) (i,j) else (j,i) // change order depending on side
val p1 = Vec3f(sinValues(ii), y, cosValues(ii))
val p2 = Vec3f(sinValues(jj), y, cosValues(jj))
p1.arr ++ n.arr ++ carr ++ p2.arr ++ n.arr ++ carr ++ pc.arr ++ n.arr ++ carr
}
triangles
}
return new VertexData3D_NC(wallTriangles ++ planes(0) ++ planes(1))
}
/**
* A "line" is a thin cylinder connecting two arbitrary points in space
*/
def line(r: Float, p1: Vec3f, p2: Vec3f, color1: Color, color2: Color, slices: Int = 4, wallOnly: Boolean = false): VertexData3D_NC = {
val p1_to_p2 = p2 - p1
val p1_to_p2_norm = p1_to_p2.normalize
val mid = p1 mid p2
val halfLength = p1_to_p2.length / 2
val cylinder = cylinderTwoColors(r, halfLength, color1, color2, slices, wallOnly)
val cylNorm = Vec3f(0, 1, 0)
val rotAxis = p1_to_p2_norm cross cylNorm
val rotAngl = math.acos(p1_to_p2_norm * cylNorm).toFloat
//println(rotAngl, rotAngl*180/math.Pi.toFloat, rotAxis)
return cylinder.transform(Mat4f.translate(mid.x, mid.y, mid.z).rotate(-rotAngl*180/math.Pi.toFloat, rotAxis.x, rotAxis.y, rotAxis.z))
}
/**
* Generic disk
* Convention: centered at y=0, with normal in +y direction
*/
def discVNC(r: Float, color: Color, slices: Int = 16): VertexData3D_NC = {
val carr = color.toArr
val circularIndices = Range(0, slices).toArray :+ 0 // eg 0,1,2,3,0
val circularSlidingTuples = circularIndices.sliding(2).map{ case Array(i,j) => (i,j)}.toArray // eg (0,1),(1,2),(2,3),(3,0)
// precalculate sin/cos values for all indices
val sinValues = circularIndices.map(i => r*math.sin(2*math.Pi * i / slices).toFloat)
val cosValues = circularIndices.map(i => r*math.cos(2*math.Pi * i / slices).toFloat)
// generate planes:
val disc = {
val pc = Vec3f(0,0,0)
val n = Vec3f(0,1,0)
val triangles = circularSlidingTuples.flatMap{ case (i,j) =>
val p1 = Vec3f(sinValues(i), 0, cosValues(i))
val p2 = Vec3f(sinValues(j), 0, cosValues(j))
p1.arr ++ n.arr ++ carr ++ p2.arr ++ n.arr ++ carr ++ pc.arr ++ n.arr ++ carr
}
triangles
}
return new VertexData3D_NC(disc)
}
/**
* Generic sphere
*/
def sphere(r: Float, color: Color, numRecursions: Int = 4): VertexData3D_NC = {
val carr = color.toArr
val p1 = Vec3f(0, -r, 0)
val p2 = Vec3f(0, 0, +r)
val p3 = Vec3f(+r, 0, 0)
val p4 = Vec3f(0, 0, -r)
val p5 = Vec3f(0, +r, 0)
val p6 = Vec3f(-r, 0, 0)
val triangles =
(p1, p3, p2) ::
(p1, p4, p3) ::
(p1, p6, p4) ::
(p1, p2, p6) ::
(p5, p2, p3) ::
(p5, p3, p4) ::
(p5, p4, p6) ::
(p5, p6, p2) ::
Nil
def midPoint(p1: Vec3f, p2: Vec3f) = (p1 mid p2).setLengthTo(r)
def recursiveRefinement(triangles: List[(Vec3f, Vec3f, Vec3f)], numRecursions: Int): List[(Vec3f, Vec3f, Vec3f)] = {
if (numRecursions==0) {
return triangles
} else {
val refinedTriangles = triangles.flatMap{ case (p1, p2, p3) =>
val p4 = midPoint(p1, p2)
val p5 = midPoint(p2, p3)
val p6 = midPoint(p3, p1)
(p1, p4, p6) ::
(p4, p2, p5) ::
(p4, p5, p6) ::
(p6, p5, p3) ::
Nil
}
return recursiveRefinement(refinedTriangles, numRecursions-1)
}
}
val refinedTriangles = recursiveRefinement(triangles, numRecursions)
def vecToNormal(p: Vec3f) = p / r
val allTriangles = refinedTriangles.toArray.flatMap{vertices =>
vertices._1.arr ++ vecToNormal(vertices._1).arr ++ carr ++
vertices._2.arr ++ vecToNormal(vertices._2).arr ++ carr ++
vertices._3.arr ++ vecToNormal(vertices._3).arr ++ carr
}
return new VertexData3D_NC(allTriangles)
}
/**
* Rounded cube
   * Unlike the generic cube it is always centered at the origin,
   * i.e., sizes are specified as half-widths.
*/
/*
def roundedCubeVNC(hwx: Float, hwy: Float, hwz: Float, r: Float, color: Color, detail: Int = 4): VertexData3D_NC = {
val p1 = Vec3f(-hwx, -hwy, +hwz)
val p2 = Vec3f(+hwx, -hwy, +hwz)
val p3 = Vec3f(-hwx, +hwy, +hwz)
val p4 = Vec3f(+hwx, +hwy, +hwz)
val p5 = Vec3f(-hwx, -hwy, -hwz)
val p6 = Vec3f(+hwx, -hwy, -hwz)
val p7 = Vec3f(-hwx, +hwy, -hwz)
val p8 = Vec3f(+hwx, +hwy, -hwz)
val carr = color.toArr
val triangles =
// front face
(p1+Vec3f(+r,+r, 0)).arr ++ Vec3f(0,0,+1).arr ++ carr ++ (p2+Vec3f(-r,+r, 0)).arr ++ Vec3f(0,0,+1).arr ++ carr ++ (p4+Vec3f(-r,-r, 0)).arr ++ Vec3f(0,0,+1).arr ++ carr ++
(p4+Vec3f(-r,-r, 0)).arr ++ Vec3f(0,0,+1).arr ++ carr ++ (p3+Vec3f(+r,-r, 0)).arr ++ Vec3f(0,0,+1).arr ++ carr ++ (p1+Vec3f(+r,+r, 0)).arr ++ Vec3f(0,0,+1).arr ++ carr ++
// back face
(p5+Vec3f(+r,+r, 0)).arr ++ Vec3f(0,0,-1).arr ++ carr ++ (p7+Vec3f(+r,-r, 0)).arr ++ Vec3f(0,0,-1).arr ++ carr ++ (p8+Vec3f(-r,-r, 0)).arr ++ Vec3f(0,0,-1).arr ++ carr ++
(p8+Vec3f(-r,-r, 0)).arr ++ Vec3f(0,0,-1).arr ++ carr ++ (p6+Vec3f(-r,+r, 0)).arr ++ Vec3f(0,0,-1).arr ++ carr ++ (p5+Vec3f(+r,+r, 0)).arr ++ Vec3f(0,0,-1).arr ++ carr ++
// right face
(p2+Vec3f( 0,+r,-r)).arr ++ Vec3f(+1,0,0).arr ++ carr ++ (p6+Vec3f( 0,+r,+r)).arr ++ Vec3f(+1,0,0).arr ++ carr ++ (p8+Vec3f( 0,-r,+r)).arr ++ Vec3f(+1,0,0).arr ++ carr ++
(p8+Vec3f( 0,-r,+r)).arr ++ Vec3f(+1,0,0).arr ++ carr ++ (p4+Vec3f( 0,-r,-r)).arr ++ Vec3f(+1,0,0).arr ++ carr ++ (p2+Vec3f( 0,+r,-r)).arr ++ Vec3f(+1,0,0).arr ++ carr ++
// left face
(p1+Vec3f( 0,+r,-r)).arr ++ Vec3f(-1,0,0).arr ++ carr ++ (p3+Vec3f( 0,-r,-r)).arr ++ Vec3f(-1,0,0).arr ++ carr ++ (p7+Vec3f( 0,-r,+r)).arr ++ Vec3f(-1,0,0).arr ++ carr ++
(p7+Vec3f( 0,-r,+r)).arr ++ Vec3f(-1,0,0).arr ++ carr ++ (p5+Vec3f( 0,+r,+r)).arr ++ Vec3f(-1,0,0).arr ++ carr ++ (p1+Vec3f( 0,+r,-r)).arr ++ Vec3f(-1,0,0).arr ++ carr ++
// top face
(p3+Vec3f(+r, 0,-r)).arr ++ Vec3f(0,+1,0).arr ++ carr ++ (p4+Vec3f(-r, 0,-r)).arr ++ Vec3f(0,+1,0).arr ++ carr ++ (p8+Vec3f(-r, 0,+r)).arr ++ Vec3f(0,+1,0).arr ++ carr ++
(p8+Vec3f(-r, 0,+r)).arr ++ Vec3f(0,+1,0).arr ++ carr ++ (p7+Vec3f(+r, 0,+r)).arr ++ Vec3f(0,+1,0).arr ++ carr ++ (p3+Vec3f(+r, 0,-r)).arr ++ Vec3f(0,+1,0).arr ++ carr ++
// bottom face
(p1+Vec3f(+r, 0,-r)).arr ++ Vec3f(0,-1,0).arr ++ carr ++ (p5+Vec3f(+r, 0,+r)).arr ++ Vec3f(0,-1,0).arr ++ carr ++ (p6+Vec3f(-r, 0,+r)).arr ++ Vec3f(0,-1,0).arr ++ carr ++
(p6+Vec3f(-r, 0,+r)).arr ++ Vec3f(0,-1,0).arr ++ carr ++ (p2+Vec3f(-r, 0,-r)).arr ++ Vec3f(0,-1,0).arr ++ carr ++ (p1+Vec3f(+r, 0,-r)).arr ++ Vec3f(0,-1,0).arr ++ carr
val hwxr = hwx-r
val hwyr = hwy-r
val hwzr = hwz-r
val cylinderY = cylinderVNC(r, hwyr, color, detail*4, true).rawData
val lengthOfBlock = cylinderY.length / 4
val cylinderYp2p4 = Array.tabulate(lengthOfBlock)(i => cylinderY(0*lengthOfBlock + i)).transfromSimpleVNC(Mat4f.translate(+hwxr, 0, +hwzr))
val cylinderYp6p8 = Array.tabulate(lengthOfBlock)(i => cylinderY(1*lengthOfBlock + i)).transfromSimpleVNC(Mat4f.translate(+hwxr, 0, -hwzr))
val cylinderYp5p7 = Array.tabulate(lengthOfBlock)(i => cylinderY(2*lengthOfBlock + i)).transfromSimpleVNC(Mat4f.translate(-hwxr, 0, -hwzr))
val cylinderYp1p3 = Array.tabulate(lengthOfBlock)(i => cylinderY(3*lengthOfBlock + i)).transfromSimpleVNC(Mat4f.translate(-hwxr, 0, +hwzr))
val cylinderX = cylinderVNC(r, hwxr, color, detail*4, true).transfromSimpleVNC(Mat4f.rotate(-90, 0, 0, 1))
val cylinderXp1p2 = Array.tabulate(lengthOfBlock)(i => cylinderX(0*lengthOfBlock + i)).transfromSimpleVNC(Mat4f.translate(0, -hwyr, +hwzr))
val cylinderXp5p6 = Array.tabulate(lengthOfBlock)(i => cylinderX(1*lengthOfBlock + i)).transfromSimpleVNC(Mat4f.translate(0, -hwyr, -hwzr))
val cylinderXp7p8 = Array.tabulate(lengthOfBlock)(i => cylinderX(2*lengthOfBlock + i)).transfromSimpleVNC(Mat4f.translate(0, +hwyr, -hwzr))
val cylinderXp3p4 = Array.tabulate(lengthOfBlock)(i => cylinderX(3*lengthOfBlock + i)).transfromSimpleVNC(Mat4f.translate(0, +hwyr, +hwzr))
val cylinderZ = cylinderVNC(r, hwzr, color, detail*4, true).transfromSimpleVNC(Mat4f.rotate(90, 1, 0, 0))
val cylinderZp2p6 = Array.tabulate(lengthOfBlock)(i => cylinderZ(0*lengthOfBlock + i)).transfromSimpleVNC(Mat4f.translate(+hwxr, -hwyr, 0))
val cylinderZp4p8 = Array.tabulate(lengthOfBlock)(i => cylinderZ(1*lengthOfBlock + i)).transfromSimpleVNC(Mat4f.translate(+hwxr, +hwyr, 0))
val cylinderZp3p7 = Array.tabulate(lengthOfBlock)(i => cylinderZ(2*lengthOfBlock + i)).transfromSimpleVNC(Mat4f.translate(-hwxr, +hwyr, 0))
val cylinderZp1p5 = Array.tabulate(lengthOfBlock)(i => cylinderZ(3*lengthOfBlock + i)).transfromSimpleVNC(Mat4f.translate(-hwxr, -hwyr, 0))
// the following ensures that the sphere uses just the right number of triangles
    // in order to match the number of faces used for the cylinder (at least for power-of-2 detail values)
// otherwise there are visible gaps between the cylinder and the sphere parts
val sphereDetail = (math.log(detail) / math.log(2)).toInt
val sphere = sphereVNC(r, color, sphereDetail)
val lengthOneEighth = sphere.length / 8
val sphere1 = Array.tabulate(lengthOneEighth)(i => sphere(0*lengthOneEighth + i)).transfromSimpleVNC(Mat4f.translate(+hwxr, -hwyr, +hwzr))
val sphere2 = Array.tabulate(lengthOneEighth)(i => sphere(1*lengthOneEighth + i)).transfromSimpleVNC(Mat4f.translate(+hwxr, -hwyr, -hwzr))
val sphere3 = Array.tabulate(lengthOneEighth)(i => sphere(2*lengthOneEighth + i)).transfromSimpleVNC(Mat4f.translate(-hwxr, -hwyr, -hwzr))
val sphere4 = Array.tabulate(lengthOneEighth)(i => sphere(3*lengthOneEighth + i)).transfromSimpleVNC(Mat4f.translate(-hwxr, -hwyr, +hwzr))
val sphere5 = Array.tabulate(lengthOneEighth)(i => sphere(4*lengthOneEighth + i)).transfromSimpleVNC(Mat4f.translate(+hwxr, +hwyr, +hwzr))
val sphere6 = Array.tabulate(lengthOneEighth)(i => sphere(5*lengthOneEighth + i)).transfromSimpleVNC(Mat4f.translate(+hwxr, +hwyr, -hwzr))
val sphere7 = Array.tabulate(lengthOneEighth)(i => sphere(6*lengthOneEighth + i)).transfromSimpleVNC(Mat4f.translate(-hwxr, +hwyr, -hwzr))
val sphere8 = Array.tabulate(lengthOneEighth)(i => sphere(7*lengthOneEighth + i)).transfromSimpleVNC(Mat4f.translate(-hwxr, +hwyr, +hwzr))
return triangles ++ cylinderYp2p4 ++ cylinderYp6p8 ++ cylinderYp5p7 ++ cylinderYp1p3 ++
cylinderXp1p2 ++ cylinderXp5p6 ++ cylinderXp7p8 ++ cylinderXp3p4 ++
cylinderZp2p6 ++ cylinderZp4p8 ++ cylinderZp3p7 ++ cylinderZp1p5 ++
sphere1 ++ sphere2 ++ sphere3 ++ sphere4 ++ sphere5 ++ sphere6 ++ sphere7 ++ sphere8
}
*/
}
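// Generator usage sketch: shapes concatenate via ++, so a simple scene might be
// built as below, given some `color: Color`; positions are illustrative.
//
//   val scene = VertexDataGen3D_NC.cube(-1, 1, -1, 1, -1, 1, color) ++
//     VertexDataGen3D_NC.sphere(0.5f, color).transform(Mat4f.translate(0, 2, 0))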
| bluenote10/ScalaOculusRiftExample | src/main/scala/com/github/bluenote/VertexData.scala | Scala | apache-2.0 | 21,141 |
package com.trafficland.augmentsbt.generators
import sbt._
import scala.collection.Seq
import java.io.File
import sbt.Keys._
import ConfigurationDirectory.autoImport._
import sbt.plugins.JvmPlugin
object LogbackConfigurationPlugin extends AutoPlugin with FileGenerator {
import autoImport._
override lazy val requires: Plugins = ConfigurationDirectory && JvmPlugin
object autoImport {
val generateLogbackConf: TaskKey[Seq[File]] = TaskKey[Seq[File]](
"generate-logback-conf",
"destructively generates a default logback configuration"
)
val generateLogbackTestConf: TaskKey[Seq[File]] = TaskKey[Seq[File]](
"generate-logback-test-conf",
"destructively generates a default logback test configuration that restarts log files and writes to STDOUT"
)
val logbackTargetFile: SettingKey[File] = SettingKey[File]("logback-target-file")
/**
* The logback-test.xml file destination target.
*/
val logbackTestTargetFile: SettingKey[File] = SettingKey[File]("logback-test-target-file")
}
override lazy val projectSettings = Seq(
logbackTargetFile <<= confDirectory(_ / "logback.xml"),
logbackTestTargetFile <<= confDirectory(_ / "logback-test.xml"),
generateLogbackConf <<= (streams, normalizedName, logbackTargetFile) map { (out, name, tf) =>
generate(out, "logback.xml.template", normalizedNameModification(name), tf)
},
generateLogbackTestConf <<= (streams, normalizedName, logbackTestTargetFile) map { (out, name, tf) =>
generate(out, "logback-test.xml.template", normalizedNameModification(name), tf)
}
)
}
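// Usage sketch: with this plugin enabled on a project, running the
// `generate-logback-conf` task writes conf/logback.xml from the bundled
// template with the project's normalizedName substituted in, and
// `generate-logback-test-conf` does the same for conf/logback-test.xml.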
| ereichert/augment-sbt | src/main/scala/com/trafficland/augmentsbt/generators/LogbackConfigurationPlugin.scala | Scala | apache-2.0 | 1,622 |
package peli
import siirrot._
import java.lang.StackOverflowError
import pelikomponentit.Auto
import scala.collection.mutable.Buffer
//Sources:
// Ordering for the min function: http://www.scala-lang.org/old/node/7529
class AI(pelitilanne_ : Pelitilanne) {
var pelitilanne = pelitilanne_
def siirto(): Koordinaatti = {
var kuviteltuTilanne = Pelitilanne.kuvitteellinen(this)
val itse = kuviteltuTilanne.vuorossa
val sijainti = kuviteltuTilanne.pelilauta.etsiAuto(itse.auto)
def etsiParasSiirtoSarja(pelitilanne: Pelitilanne, siirrot: List[Siirto]): (List[Siirto], Int) = {
try {
if (siirrot.size <= 5 ) {
val vuorossa = pelitilanne.vuorossa
var vaihtoehdot = Buffer[(List[Siirto],Int)]()
pelitilanne.kaikkiSallitutSiirrot(vuorossa.auto).foreach{ siirto: Siirto =>
          if (siirto.vaihde > vuorossa.auto.vaihde) vuorossa.auto.nostaVaihdetta() //Shift to the gear the move requires
          else if (siirto.vaihde < vuorossa.auto.vaihde) vuorossa.auto.laskeVaihdetta()
          pelitilanne.siirraAutoa(siirto.kohdeKoordinaatti) //Move the car
          val arvostelu = arvosteleTilanne(pelitilanne, siirrot) //Check whether either player has won
          if (arvostelu != 0) vaihtoehdot.append( (siirrot ++ List(siirto), arvostelu) ) //If so, record the moves and the position's value
          else {
            val v = etsiParasSiirtoSarja(pelitilanne, siirrot ++ List(siirto)) //Otherwise search for the best continuation and record it
vaihtoehdot.append( v )
}
          pelitilanne.peruSiirto() //Take back the move just made.
}
        //Return the best move for the AI when it is the AI's turn, and the best move for the
        //opponent (worst for the AI) when it is the opponent's turn
if (vuorossa == itse) valitseParas(vaihtoehdot)
else valitseHuonoin(vaihtoehdot)
} else {
        (siirrot, 0) //If the search would go too deep, the line is valued at 0
}
} catch {
case e: StackOverflowError => {
        (siirrot, 0) //If the search cannot be carried to completion, the line is valued at 0
      }
      case _: Throwable => (siirrot, 0) //Likewise if any other error occurs.
}
}
    //If the player is winning, return a positive number: how many rounds the win takes
    //If the player is losing, return a negative number: how many rounds the loss takes
    //Otherwise 0.
def arvosteleTilanne(pelitilanne: Pelitilanne, siirrot: List[Siirto]): Int = {
val mahdollinenVoittaja = pelitilanne.tarkistaVoitto._1
if (mahdollinenVoittaja.isDefined) {
if (mahdollinenVoittaja.get == itse) siirrot.size
else -siirrot.size
}
else 0
}
def valitseParas(lista: Buffer[(List[Siirto],Int)]): (List[Siirto],Int) = {
require(lista.size > 0)
val positiiviset = lista.filter(_._2 > 0)
val neutraalit = lista.filter(_._2 == 0)
val negatiiviset = lista.filter(_._2 < 0)
      if (positiiviset.size > 0) positiiviset.min(Ordering.by{pari: (List[Siirto],Int) => pari._2}) //The smallest move count is the best win
      else if (neutraalit.size > 0) neutraalit.max(Ordering.by{pari: (List[Siirto],Int) => pari._1(0).vaihde})
      else negatiiviset.min(Ordering.by{pari: (List[Siirto],Int) => pari._2}) //Largest in absolute value -> the longest loss is best.
}
def valitseHuonoin(lista: Buffer[(List[Siirto],Int)]): (List[Siirto],Int) = {
require(lista.size > 0)
val positiiviset = lista.filter(_._2 > 0)
val neutraalit = lista.filter(_._2 == 0)
val negatiiviset = lista.filter(_._2 < 0)
      if (negatiiviset.size > 0) negatiiviset.max(Ordering.by{pari: (List[Siirto],Int) => pari._2}) //The quickest loss is worst
      else if (neutraalit.size > 0) neutraalit.max(Ordering.by{pari: (List[Siirto],Int) => pari._1(0).vaihde})
      else positiiviset.max(Ordering.by{pari: (List[Siirto],Int) => pari._2}) //The slowest win is worst.
}
val parasSiirtoSarja = etsiParasSiirtoSarja(kuviteltuTilanne, List[Siirto]())
val siirto = parasSiirtoSarja._1(0)
    if (siirto.vaihde > pelitilanne.vuorossa.auto.vaihde) pelitilanne.vuorossa.auto.nostaVaihdetta() //Shift to the gear the move requires
    else if (siirto.vaihde < pelitilanne.vuorossa.auto.vaihde) pelitilanne.vuorossa.auto.laskeVaihdetta()
    siirto.kohdeKoordinaatti //Return the target square
}
} | MrAurela/Formula | Formula/src/peli/AI.scala | Scala | mit | 4,582 |
/*
* Copyright 2001-2015 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest
import scala.collection.GenTraversable
import scala.scalajs.js.timers.SetTimeoutHandle
private[scalatest] class ConcurrentLinkedQueue[T] extends Serializable {
private final val queue = new scala.collection.mutable.ListBuffer[T]
def add(ele: T): Unit = {
queue += ele
}
def iterator: Iterator[T] = queue.iterator
def isEmpty: Boolean = queue.isEmpty
def asScala: GenTraversable[T] = queue
def poll: T = queue.remove(0)
}
private[scalatest] class LinkedBlockingQueue[T] extends Serializable {
private final val queue = new scala.collection.mutable.ListBuffer[T]
def put(ele: T): Unit = queue += ele
def take(): T = queue.remove(0)
def size: Int = queue.size
}
private[scalatest] class CountDownLatch(count: Int) {
private var currentCount: Long = count
def countDown(): Unit = {
currentCount =
if (currentCount > 0)
currentCount - 1
else
0
}
def getCount: Long = currentCount
def await(): Unit =
if (currentCount == 0)
return
else
throw new UnsupportedOperationException("Scala.js is single-threaded!")
}
private[scalatest] object NameTransformer {
def decode(encoded: String): String = encoded
}
private[scalatest] trait TimerTask extends Runnable {
var handle: Option[SetTimeoutHandle] = None
def run()
def cancel(): Unit = {
handle match {
case Some(h) => scala.scalajs.js.timers.clearTimeout(h)
case None =>
}
}
}
private[scalatest] class Timer {
def schedule(task: TimerTask, millis: Long): Unit = {
task.handle =
Some(
scala.scalajs.js.timers.setTimeout(millis) {
task.run()
}
)
}
def cancel(): Unit = () // You can't cancel the one and only timer in javascript.
}
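// Usage sketch: scheduling a one-shot task on the single JS timer. The task
// body is illustrative; only Timer, TimerTask and schedule come from this file.
//
//   val timer = new Timer
//   val task = new TimerTask { def run(): Unit = println("tick") }
//   timer.schedule(task, 1000L) // runs once after ~1s; task.cancel() clears it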
| dotty-staging/scalatest | scalatest.js/src/main/scala/org/scalatest/JavaClassesWrappers.scala | Scala | apache-2.0 | 2,391 |
/*
* Copyright 2015 Webtrends (http://www.webtrends.com)
*
* See the LICENCE.txt file distributed with this work for additional
* information regarding copyright ownership.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.webtrends.service.sockorest
import akka.util.Timeout
import com.webtrends.harness.command.{Command, CommandBean, CommandException, CommandResponse}
import com.webtrends.harness.component.ComponentHelper
import com.webtrends.harness.component.socko.route.SockoDelete
import com.webtrends.service.Person
import scala.concurrent.duration._
import scala.concurrent.{Future, Promise}
import scala.util.{Failure, Success}
class SDelete extends Command
with SockoDelete
with ComponentHelper {
implicit val executionContext = context.dispatcher
implicit val timeout = Timeout(2 seconds)
override def path: String = "/person/$name"
/**
* Name of the command that will be used for the actor name
*
* @return
*/
override def commandName: String = SDelete.CommandName
/**
* The primary entry point for the command, the actor for this command
* will ignore all other messaging and only execute through this
*
* @return
*/
def execute[T](bean: Option[CommandBean]): Future[CommandResponse[T]] = {
val p = Promise[CommandResponse[T]]
bean match {
case Some(b) =>
getComponent("wookiee-cache-memcache") onComplete {
case Success(actor) =>
// If we were doing a real API we might want to check the cache to see if it
// exists first and if it does then throw some sort of exception, but this is just an example
val personName = b("name").asInstanceOf[String]
Person(personName).deleteFromCache(actor) onComplete {
case Success(s) => p success CommandResponse[T](None, "txt")
case Failure(f) => p failure f
}
case Failure(f) => p failure f
}
case None => p failure new CommandException("SDelete", "Cache not initialized")
}
p.future
}
}
object SDelete {
def CommandName = "SDelete"
}
| Webtrends/wookiee-spray | example-rest/src/main/scala/com/webtrends/service/sockorest/SDelete.scala | Scala | apache-2.0 | 2,629 |
/*
* Copyright (c) 2014 Contributor. All rights reserved.
*/
package org.scalaide.debug.internal.expression.proxies.primitives.operations.numeric
import org.junit.Test
import org.scalaide.debug.internal.expression.Names.Java
import org.scalaide.debug.internal.expression.BaseIntegrationTest
import org.scalaide.debug.internal.expression.BaseIntegrationTestCompanion
import org.scalaide.debug.internal.expression.TestValues
class AdditionTest extends BaseIntegrationTest(AdditionTest) {
import TestValues.ValuesTestCase._
@Test
def `byte + sth`(): Unit = {
eval("byte + byte2", byte + byte2, Java.primitives.int)
eval("byte + short2", byte + short2, Java.primitives.int)
eval("byte + char2", byte + char2, Java.primitives.int)
eval("byte + int2", byte + int2, Java.primitives.int)
eval("byte + long2", byte + long2, Java.primitives.long)
eval("byte + float", byte + float, Java.primitives.float)
eval("byte + double", byte + double, Java.primitives.double)
}
@Test
def `short + sth`(): Unit = {
eval("short + byte2", short + byte2, Java.primitives.int)
eval("short + short2", short + short2, Java.primitives.int)
eval("short + char2", short + char2, Java.primitives.int)
eval("short + int2", short + int2, Java.primitives.int)
eval("short + long2", short + long2, Java.primitives.long)
eval("short + float", short + float, Java.primitives.float)
eval("short + double", short + double, Java.primitives.double)
}
@Test
def `char + sth`(): Unit = {
eval("char + byte2", char + byte2, Java.primitives.int)
eval("char + short2", char + short2, Java.primitives.int)
eval("char + char2", char + char2, Java.primitives.int)
eval("char + int2", char + int2, Java.primitives.int)
eval("char + long2", char + long2, Java.primitives.long)
eval("char + float", char + float, Java.primitives.float)
eval("char + double", char + double, Java.primitives.double)
}
@Test
def `int + sth`(): Unit = {
eval("int + byte2", int + byte2, Java.primitives.int)
eval("int + short2", int + short2, Java.primitives.int)
eval("int + char", int + char, Java.primitives.int)
eval("int + int2", int + int2, Java.primitives.int)
eval("int + long2", int + long2, Java.primitives.long)
eval("int + float", int + float, Java.primitives.float)
eval("int + double", int + double, Java.primitives.double)
}
@Test
def `long + sth`(): Unit = {
eval("long + byte2", long + byte2, Java.primitives.long)
eval("long + short2", long + short2, Java.primitives.long)
eval("long + char", long + char, Java.primitives.long)
eval("long + int2", long + int2, Java.primitives.long)
eval("long + long2", long + long2, Java.primitives.long)
eval("long + float", long + float, Java.primitives.float)
eval("long + double", long + double, Java.primitives.double)
}
@Test
def `float + sth`(): Unit = {
eval("float + byte2", float + byte2, Java.primitives.float)
eval("float + short2", float + short2, Java.primitives.float)
eval("float + char", float + char, Java.primitives.float)
eval("float + int2", float + int2, Java.primitives.float)
eval("float + long2", float + long2, Java.primitives.float)
eval("float + float2", float + float2, Java.primitives.float)
eval("float + double", float + double, Java.primitives.double)
}
@Test
def `double + sth`(): Unit = {
eval("double + byte2", double + byte2, Java.primitives.double)
eval("double + short2", double + short2, Java.primitives.double)
eval("double + char", double + char, Java.primitives.double)
eval("double + int2", double + int2, Java.primitives.double)
eval("double + long2", double + long2, Java.primitives.double)
eval("double + float", double + float, Java.primitives.double)
eval("double + double2", double + double2, Java.primitives.double)
}
}
object AdditionTest extends BaseIntegrationTestCompanion
| Kwestor/scala-ide | org.scala-ide.sdt.debug.expression.tests/src/org/scalaide/debug/internal/expression/proxies/primitives/operations/numeric/AdditionTest.scala | Scala | bsd-3-clause | 3,961 |
package monocle.function
import monocle.MonocleSuite
import scala.annotation.nowarn
@nowarn
class InitExample extends MonocleSuite {
test("init creates a Lens from a 2-6 tuple to its tail") {
assertEquals(((2, false) applyLens init get), 2)
assertEquals((('r', false, "lala", 5.6, 7, 4) applyLens init get), (('r', false, "lala", 5.6, 7)))
assertEquals(((2, false, "hello") applyLens init replace ((4, true))), ((4, true, "hello")))
}
}
| julien-truffaut/Monocle | example/src/test/scala/monocle/function/InitExample.scala | Scala | mit | 457 |
class Context
class ContextBase { def settings = 1 }
class Test {
implicit def toBase(ctx: Context): ContextBase = ???
def test(ctx0: Context) = {
implicit val ctx = { ctx0.settings; ??? } // error
}
def f: Unit = { implicitly[Int]; implicit val i = implicitly[Int] } // error
}
| som-snytt/dotty | tests/neg/i4709.scala | Scala | apache-2.0 | 293 |
/**
*
* PairedDongleTest
* Ledger wallet
*
* Created by Pierre Pollastri on 06/02/15.
*
* The MIT License (MIT)
*
* Copyright (c) 2015 Ledger
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*
*/
package com.ledger.ledgerwallet.model
import android.test.InstrumentationTestCase
import com.ledger.ledgerwallet.models.PairedDongle
import com.ledger.ledgerwallet.utils.logs.Logger
import junit.framework.Assert
import org.spongycastle.util.encoders.Hex
import scala.collection.mutable
class PairedDongleTest extends InstrumentationTestCase {
def testShouldCreateAndGet(): Unit = {
implicit val context = getInstrumentation.getTargetContext
val pairingId = "a test pairing id"
val name = "A super name for an amazing dongle"
val pairingKey = Hex.decode("6032d5032c905f39447bc3f28a043a99")
val dongle = PairedDongle.create(pairingId, name, pairingKey)
Assert.assertEquals(pairingId, dongle.id.get)
Assert.assertEquals(name, dongle.name.get)
Assert.assertEquals(Hex.toHexString(pairingKey), Hex.toHexString(dongle.pairingKey.get.secret))
}
def testShouldCreateAndGetFromPreferences(): Unit = {
implicit val context = getInstrumentation.getTargetContext
val pairingId = "a test pairing id"
val name = "A super name for an amazing dongle"
val pairingKey = Hex.decode("6032d5032c905f39447bc3f28a043a99")
PairedDongle.create(pairingId, name, pairingKey)
val dongle = PairedDongle.get(pairingId)
Assert.assertTrue(dongle.isDefined)
Assert.assertEquals(pairingId, dongle.get.id.get)
Assert.assertEquals(name, dongle.get.name.get)
Assert.assertEquals(Hex.toHexString(pairingKey), Hex.toHexString(dongle.get.pairingKey.get.secret))
Logger.d(Hex.toHexString(pairingKey) + " <> " + Hex.toHexString(dongle.get.pairingKey.get.secret))
}
  def testShouldCreateAndGetFromPreferencesMultipleDongle(): Unit = {
implicit val context = getInstrumentation.getTargetContext
val testSet = mutable.Map[String, (String, String)]()
testSet("a test pairing id") = ("A super name for an amazing dongle", "6032d5032c905f39447bc3f28a043a99")
testSet("pairing_1") = ("A first name", "aa32d5032c905f39447bc3f28a043a994ccddb8f")
testSet("pairing_2") = ("A second name", "bb32d5032c905f39447bc3f28a043a994ccddb8f")
testSet("pairing_3") = ("A third name", "cc32d5032c905f39447bc3f28a043a994ccddb8f")
testSet("pairing_4") = ("A fourth name", "dd32d5032c905f39447bc3f28a043a994ccddb8f")
testSet("pairing_5") = ("A fifth name", "ee32d5032c905f39447bc3f28a043a994ccddb8f")
testSet("pairing_6") = ("A sixth name", "ff32d5032c905f39447bc3f28a043a994ccddb8f")
testSet foreach {
case (id, value) =>
PairedDongle.create(id, value._1, Hex.decode(value._2))
}
val dongles = PairedDongle.all
Assert.assertEquals(testSet.size, dongles.length)
for (dongle <- dongles) {
val sample = testSet(dongle.id.get)
Assert.assertNotNull(sample)
Assert.assertEquals(sample._1, dongle.name.get)
Assert.assertEquals(sample._2, Hex.toHexString(dongle.pairingKey.get.secret))
}
}
}
| Morveus/ledger-wallet-android | app/src/androidTest/scala/com/ledger/ledgerwallet/model/PairedDongleTest.scala | Scala | mit | 4,146 |
/*
* Copyright (C) 2009-2017 Lightbend Inc. <https://www.lightbend.com>
*/
package play.filters.csrf
import java.net.{ URLDecoder, URLEncoder }
import java.util.Locale
import javax.inject.Inject
import akka.stream._
import akka.stream.scaladsl.{ Flow, Keep, Sink, Source }
import akka.stream.stage._
import akka.util.ByteString
import play.api.http.HeaderNames._
import play.api.http.SessionConfiguration
import play.api.libs.crypto.CSRFTokenSigner
import play.api.libs.streams.Accumulator
import play.api.libs.typedmap.TypedMap
import play.api.mvc._
import play.core.parsers.Multipart
import play.filters.cors.CORSFilter
import play.filters.csrf.CSRF._
import play.libs.typedmap.{ TypedEntry, TypedKey }
import play.mvc.Http.RequestBuilder
import scala.concurrent.Future
/**
* An action that provides CSRF protection.
*
* @param config The CSRF configuration.
* @param tokenSigner The CSRF token signer.
* @param tokenProvider A token provider to use.
* @param next The composed action that is being protected.
* @param errorHandler handling failed token error.
*/
class CSRFAction(
next: EssentialAction,
config: CSRFConfig = CSRFConfig(),
tokenSigner: CSRFTokenSigner,
tokenProvider: TokenProvider,
sessionConfiguration: SessionConfiguration,
errorHandler: => ErrorHandler = CSRF.DefaultErrorHandler)(implicit mat: Materializer) extends EssentialAction {
import play.core.Execution.Implicits.trampoline
lazy val csrfActionHelper = new CSRFActionHelper(sessionConfiguration, config, tokenSigner)
private def checkFailed(req: RequestHeader, msg: String): Accumulator[ByteString, Result] =
Accumulator.done(csrfActionHelper.clearTokenIfInvalid(req, errorHandler, msg))
def apply(untaggedRequest: RequestHeader) = {
val request = csrfActionHelper.tagRequestFromHeader(untaggedRequest)
// this function exists purely to aid readability
def continue = next(request)
// Only filter unsafe methods and content types
if (config.checkMethod(request.method) && config.checkContentType(request.contentType)) {
if (!csrfActionHelper.requiresCsrfCheck(request)) {
continue
} else {
// Only proceed with checks if there is an incoming token in the header, otherwise there's no point
csrfActionHelper.getTokenToValidate(request).map { headerToken =>
// First check if there's a token in the query string or header, if we find one, don't bother handling the body
csrfActionHelper.getHeaderToken(request).map { queryStringToken =>
if (tokenProvider.compareTokens(headerToken, queryStringToken)) {
filterLogger.trace("[CSRF] Valid token found in query string")
continue
} else {
filterLogger.trace("[CSRF] Check failed because invalid token found in query string: " + queryStringToken)
checkFailed(request, "Bad CSRF token found in query String")
}
} getOrElse {
// Check the body
request.contentType match {
case Some("application/x-www-form-urlencoded") =>
filterLogger.trace(s"[CSRF] Check form body with url encoding")
checkFormBody(request, next, headerToken, config.tokenName)
case Some("multipart/form-data") =>
filterLogger.trace(s"[CSRF] Check form body with multipart")
checkMultipartBody(request, next, headerToken, config.tokenName)
// No way to extract token from other content types
case Some(content) =>
filterLogger.trace(s"[CSRF] Check failed because $content request")
checkFailed(request, s"No CSRF token found for $content body")
case None =>
filterLogger.trace(s"[CSRF] Check failed because request without content type")
checkFailed(request, s"No CSRF token found for body without content type")
}
}
} getOrElse {
filterLogger.trace("[CSRF] Check failed because no token found in headers")
checkFailed(request, "No CSRF token found in headers")
}
}
} else if (csrfActionHelper.getTokenToValidate(request).isEmpty && config.createIfNotFound(request)) {
// No token in header and we have to create one if not found, so create a new token
val newToken = tokenProvider.generateToken
// The request
val requestWithNewToken = csrfActionHelper.tagRequest(request, Token(config.tokenName, newToken))
// Once done, add it to the result
next(requestWithNewToken).map(result =>
csrfActionHelper.addTokenToResponse(newToken, request, result))
} else {
filterLogger.trace("[CSRF] No check necessary")
next(request)
}
}
private def checkFormBody: (RequestHeader, EssentialAction, String, String) => Accumulator[ByteString, Result] = checkBody(extractTokenFromFormBody) _
private def checkMultipartBody(request: RequestHeader, action: EssentialAction, tokenFromHeader: String, tokenName: String) = {
(for {
mt <- request.mediaType
maybeBoundary <- mt.parameters.find(_._1.equalsIgnoreCase("boundary"))
boundary <- maybeBoundary._2
} yield {
checkBody(extractTokenFromMultipartFormDataBody(ByteString(boundary)))(request, action, tokenFromHeader, tokenName)
}).getOrElse(checkFailed(request, "No boundary found in multipart/form-data request"))
}
private def checkBody[T](extractor: (ByteString, String) => Option[String])(request: RequestHeader, action: EssentialAction, tokenFromHeader: String, tokenName: String) = {
// We need to ensure that the action isn't actually executed until the body is validated.
// To do that, we use Flow.splitWhen(_ => false). This basically says, give me a Source
// containing all the elements when you receive the first element. Our BodyHandler doesn't
// output any part of the body until it has validated the CSRF check, so we know that
// the source is validated. Then using a Sink.head, we turn that Source into an Accumulator,
// which we can then map to execute and feed into our action.
// CSRF check failures are used by failing the stream with a NoTokenInBody exception.
Accumulator(
Flow[ByteString]
.via(new BodyHandler(config, { body =>
if (extractor(body, tokenName).fold(false)(tokenProvider.compareTokens(_, tokenFromHeader))) {
filterLogger.trace("[CSRF] Valid token found in body")
true
} else {
filterLogger.trace("[CSRF] Check failed because no or invalid token found in body")
false
}
}))
.splitWhen(_ => false)
        .prefixAndTail(0) // TODO rewrite BodyHandler so that it emits a sub-source; then we can avoid all this dancing around
.map(_._2)
.concatSubstreams
.toMat(Sink.head[Source[ByteString, _]])(Keep.right)
).mapFuture { validatedBodySource =>
filterLogger.trace(s"[CSRF] running with validated body source")
action(request).run(validatedBodySource)
}.recoverWith {
case NoTokenInBody =>
filterLogger.trace("[CSRF] Check failed with NoTokenInBody")
csrfActionHelper.clearTokenIfInvalid(request, errorHandler, "No CSRF token found in body")
}
}
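  // Hedged sketch (not from the original file): the splitWhen(_ => false) dance above can be
  // reproduced in isolation. All elements end up in a single substream that is exposed as a
  // Source, and the Future completes only once the first element has been emitted downstream,
  // which, with BodyHandler in front, means only after the CSRF check has passed.
  //
  // {{{
  // import akka.actor.ActorSystem
  // import akka.stream.scaladsl.{Sink, Source}
  // import akka.util.ByteString
  // import scala.concurrent.Future
  //
  // implicit val system: ActorSystem = ActorSystem("sketch") // assumed local system
  //
  // val deferred: Future[Source[ByteString, _]] =
  //   Source(List(ByteString("a"), ByteString("b")))
  //     .splitWhen(_ => false) // one substream holding every element
  //     .prefixAndTail(0)      // expose the substream itself as a Source
  //     .map(_._2)
  //     .concatSubstreams
  //     .runWith(Sink.head[Source[ByteString, _]])
  // }}}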
/**
* Does a very simple parse of the form body to find the token, if it exists.
*/
private def extractTokenFromFormBody(body: ByteString, tokenName: String): Option[String] = {
val tokenEquals = ByteString(URLEncoder.encode(tokenName, "utf-8")) ++ ByteString('=')
// First check if it's the first token
if (body.startsWith(tokenEquals)) {
Some(URLDecoder.decode(body.drop(tokenEquals.size).takeWhile(_ != '&').utf8String, "utf-8"))
} else {
val andTokenEquals = ByteString('&') ++ tokenEquals
val index = body.indexOfSlice(andTokenEquals)
if (index == -1) {
None
} else {
Some(URLDecoder.decode(body.drop(index + andTokenEquals.size).takeWhile(_ != '&').utf8String, "utf-8"))
}
}
}
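  // Example behaviour of the parser above (illustrative values; the token name is whatever
  // config.tokenName holds, "csrfToken" here is just an assumption):
  //
  //   extractTokenFromFormBody(ByteString("csrfToken=abc&x=1"), "csrfToken") == Some("abc")
  //   extractTokenFromFormBody(ByteString("x=1&csrfToken=abc"), "csrfToken") == Some("abc")
  //   extractTokenFromFormBody(ByteString("x=1&y=2"), "csrfToken")           == None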
/**
* Does a very simple multipart/form-data parse to find the token if it exists.
*/
private def extractTokenFromMultipartFormDataBody(boundary: ByteString)(body: ByteString, tokenName: String): Option[String] = {
    val crlf = ByteString("\r\n")
    val boundaryLine = ByteString("\r\n--") ++ boundary
/**
* A boundary will start with CRLF, unless it's the first boundary in the body. So that we don't have to handle
* the first boundary differently, prefix the whole body with CRLF.
*/
val prefixedBody = crlf ++ body
/**
* Extract the headers from the given position.
*
* This is invoked recursively, and exits when it reaches the end of stream, or a blank line (indicating end of
* headers). It returns the headers, and the position of the first byte after the headers. The headers are all
* converted to lower case.
*/
def extractHeaders(position: Int): (Int, List[(String, String)]) = {
// If it starts with CRLF, we've reached the end of the headers
if (prefixedBody.startsWith(crlf, position)) {
(position + 2) -> Nil
} else {
// Read up to the next CRLF
val nextCrlf = prefixedBody.indexOfSlice(crlf, position)
if (nextCrlf == -1) {
// Technically this is a protocol error
position -> Nil
} else {
val header = prefixedBody.slice(position, nextCrlf).utf8String
header.split(":", 2) match {
case Array(_) =>
// Bad header, ignore
extractHeaders(nextCrlf + 2)
case Array(key, value) =>
val (endIndex, headers) = extractHeaders(nextCrlf + 2)
endIndex -> ((key.trim().toLowerCase(Locale.ENGLISH) -> value.trim()) :: headers)
}
}
}
}
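    // Hedged walk-through: if prefixedBody held the bytes
    //   "content-type: text/plain\r\n\r\ndata"
    // then extractHeaders(0) would read one header line, hit the blank line, and return
    //   (28, List("content-type" -> "text/plain"))
    // where 28 is the offset of the first body byte ("d").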
/**
* Find the token.
*
* This is invoked recursively, once for each part found. It finds the start of the next part, then extracts
* the headers, and if the header has a name of our token name, then it extracts the body, and returns that,
* otherwise it moves onto the next part.
*/
def findToken(position: Int): Option[String] = {
// Find the next boundary from position
prefixedBody.indexOfSlice(boundaryLine, position) match {
case -1 => None
case nextBoundary =>
// Progress past the CRLF at the end of the boundary
val nextCrlf = prefixedBody.indexOfSlice(crlf, nextBoundary + boundaryLine.size)
if (nextCrlf == -1) {
None
} else {
val startOfNextPart = nextCrlf + 2
// Extract the headers
val (startOfPartData, headers) = extractHeaders(startOfNextPart)
headers.toMap match {
case Multipart.PartInfoMatcher(name) if name == tokenName =>
// This part is the token, find the next boundary
val endOfData = prefixedBody.indexOfSlice(boundaryLine, startOfPartData)
if (endOfData == -1) {
None
} else {
// Extract the token value
Some(prefixedBody.slice(startOfPartData, endOfData).utf8String)
}
case _ =>
// Find the next part
findToken(startOfPartData)
}
}
}
}
findToken(0)
}
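  // Hedged end-to-end example of the parser above, with boundary "XX" and a token part named
  // "csrfToken" (assumes Multipart.PartInfoMatcher extracts the part name from the
  // content-disposition header, as in Play's multipart support):
  //
  //   val body = ByteString(
  //     "--XX\r\n" +
  //     "Content-Disposition: form-data; name=\"csrfToken\"\r\n" +
  //     "\r\n" +
  //     "abc\r\n" +
  //     "--XX--")
  //   extractTokenFromMultipartFormDataBody(ByteString("XX"))(body, "csrfToken") // Some("abc")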
}
/**
* A body handler.
*
* This will buffer the body until it reaches the end of stream, or until the buffer limit is reached.
*
 * Once it has finished buffering, it attempts to find the token in the body and validate it,
 * failing the stream if the token is missing or invalid. If the token is valid, it forwards the
 * buffered body, stops buffering, and passes the rest of the body through as is (or completes if
 * upstream has already finished).
*/
private class BodyHandler(config: CSRFConfig, checkBody: ByteString => Boolean) extends GraphStage[FlowShape[ByteString, ByteString]] {
private val PostBodyBufferMax = config.postBodyBuffer
val in: Inlet[ByteString] = Inlet("BodyHandler.in")
val out: Outlet[ByteString] = Outlet("BodyHandler.out")
override val shape = FlowShape(in, out)
override def createLogic(inheritedAttributes: Attributes): GraphStageLogic =
new GraphStageLogic(shape) with OutHandler with InHandler with StageLogging {
var buffer: ByteString = ByteString.empty
var next: ByteString = _
def continueHandler = new InHandler with OutHandler {
override def onPush(): Unit = push(out, grab(in))
override def onPull(): Unit = pull(in)
override def onUpstreamFinish(): Unit = {
if (next == null) completeStage()
}
}
def onPush(): Unit = {
val elem = grab(in)
if (exceededBufferLimit(elem)) {
// We've finished buffering up to the configured limit, try to validate
buffer ++= elem
if (checkBody(buffer)) {
// Switch to continue, and push the buffer
setHandlers(in, out, continueHandler)
if (!(isClosed(in) || hasBeenPulled(in))) {
val toPush = buffer
buffer = null
push(out, toPush)
pull(in)
} else {
next = buffer
buffer = null
}
} else {
// CSRF check failed
failStage(NoTokenInBody)
}
} else {
// Buffer
buffer ++= elem
pull(in)
}
}
def onPull(): Unit = {
if (!hasBeenPulled(in)) pull(in)
}
override def onUpstreamFinish(): Unit = {
// CSRF check
if (checkBody(buffer)) emit(out, buffer, () => completeStage())
else failStage(NoTokenInBody)
}
private def exceededBufferLimit(elem: ByteString) = {
buffer.size + elem.size > PostBodyBufferMax
}
setHandlers(in, out, this)
}
}
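// Hedged usage sketch for the stage above: wired into a Flow it emits nothing until the buffered
// prefix passes `checkBody`, then degenerates into an identity flow. `tokenFoundIn` is a
// placeholder predicate, not part of this file.
//
//   val guarded: Flow[ByteString, ByteString, NotUsed] =
//     Flow[ByteString].via(new BodyHandler(config, body => tokenFoundIn(body)))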
private[csrf] object NoTokenInBody extends RuntimeException(null, null, false, false)
class CSRFActionHelper(
sessionConfiguration: SessionConfiguration,
csrfConfig: CSRFConfig,
tokenSigner: CSRFTokenSigner
) {
/**
* Get the header token, that is, the token that should be validated.
*/
def getTokenToValidate(request: RequestHeader) = {
val attrToken = CSRF.getToken(request).map(_.value)
val cookieToken = csrfConfig.cookieName.flatMap(cookie => request.cookies.get(cookie).map(_.value))
val sessionToken = request.session.get(csrfConfig.tokenName)
cookieToken orElse sessionToken orElse attrToken filter { token =>
// return None if the token is invalid
!csrfConfig.signTokens || tokenSigner.extractSignedToken(token).isDefined
}
}
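  // Precedence sketch for the lookup above (illustrative, with csrfConfig.cookieName set to
  // Some("PLAY_CSRF")):
  //
  //   cookie + session + attr present   => cookie value wins
  //   session + attr present, no cookie => session value
  //   attr only                         => attr value
  //
  // and when signTokens is on, any candidate whose signature fails extractSignedToken is
  // filtered out, yielding None.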
/**
* Tag incoming requests with the token in the header
*/
def tagRequestFromHeader(request: RequestHeader): RequestHeader = {
getTokenToValidate(request).fold(request) { tokenValue =>
val token = Token(csrfConfig.tokenName, tokenValue)
val newReq = tagRequest(request, token)
if (csrfConfig.signTokens) {
// Extract the signed token, and then resign it. This makes the token random per request, preventing the BREACH
// vulnerability
val newTokenValue = tokenSigner.extractSignedToken(token.value).map(tokenSigner.signToken)
newTokenValue.fold(newReq)(tv =>
newReq.addAttr(Token.InfoAttr, TokenInfo(token, tv))
)
} else {
newReq
}
}
}
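  // BREACH-mitigation sketch (hypothetical values): extractSignedToken recovers the raw token
  // and signToken re-signs it with a fresh nonce, so the same underlying token gets a different
  // wire representation on every request:
  //
  //   val raw      = tokenSigner.extractSignedToken(signedValue) // Some(rawToken)
  //   val resigned = raw.map(tokenSigner.signToken)              // differs from signedValue,
  //                                                              // but verifies to the same raw token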
def tagRequestFromHeader[A](request: Request[A]): Request[A] = {
Request(tagRequestFromHeader(request: RequestHeader), request.body)
}
def tagRequest(request: RequestHeader, token: Token): RequestHeader = {
request.addAttr(Token.InfoAttr, TokenInfo(token))
}
def tagRequest[A](request: Request[A], token: Token): Request[A] = {
Request(tagRequest(request: RequestHeader, token), request.body)
}
def tagRequest(requestBuilder: RequestBuilder, token: Token): RequestBuilder = {
requestBuilder.attr(new TypedKey(Token.InfoAttr), TokenInfo(token))
}
def getHeaderToken(request: RequestHeader) = {
val queryStringToken = request.getQueryString(csrfConfig.tokenName)
val headerToken = request.headers.get(csrfConfig.headerName)
queryStringToken orElse headerToken
}
def requiresCsrfCheck(request: RequestHeader): Boolean = {
if (csrfConfig.bypassCorsTrustedOrigins && request.tags.contains(CORSFilter.RequestTag)) {
filterLogger.trace("[CSRF] Bypassing check because CORSFilter request tag found")
false
} else {
csrfConfig.shouldProtect(request)
}
}
def addTokenToResponse(newToken: String, request: RequestHeader, result: Result) = {
if (isCached(result)) {
filterLogger.trace("[CSRF] Not adding token to cached response")
result
} else {
filterLogger.trace("[CSRF] Adding token to result: " + result)
csrfConfig.cookieName.map {
// cookie
name =>
result.withCookies(Cookie(name, newToken, path = sessionConfiguration.path, domain = sessionConfiguration.domain,
secure = csrfConfig.secureCookie, httpOnly = csrfConfig.httpOnlyCookie))
} getOrElse {
val newSession = result.session(request) + (csrfConfig.tokenName -> newToken)
result.withSession(newSession)
}
}
}
def isCached(result: Result): Boolean =
result.header.headers.get(CACHE_CONTROL).fold(false)(!_.contains("no-cache"))
def clearTokenIfInvalid(request: RequestHeader, errorHandler: ErrorHandler, msg: String): Future[Result] = {
import play.core.Execution.Implicits.trampoline
errorHandler.handle(request, msg) map { result =>
CSRF.getToken(request).fold(
csrfConfig.cookieName.flatMap { cookie =>
request.cookies.get(cookie).map { token =>
result.discardingCookies(
DiscardingCookie(cookie, domain = sessionConfiguration.domain, path = sessionConfiguration.path, secure = csrfConfig.secureCookie))
}
}.getOrElse {
result.withSession(result.session(request) - csrfConfig.tokenName)
}
)(_ => result)
}
}
}
/**
* CSRF check action.
*
* Apply this to all actions that require a CSRF check.
*/
case class CSRFCheck @Inject() (config: CSRFConfig, tokenSigner: CSRFTokenSigner, sessionConfiguration: SessionConfiguration) {
private class CSRFCheckAction[A](
tokenProvider: TokenProvider,
errorHandler: ErrorHandler,
wrapped: Action[A],
csrfActionHelper: CSRFActionHelper
) extends Action[A] {
def parser = wrapped.parser
def executionContext = wrapped.executionContext
def apply(untaggedRequest: Request[A]) = {
val request = csrfActionHelper.tagRequestFromHeader(untaggedRequest)
// Maybe bypass
if (!csrfActionHelper.requiresCsrfCheck(request) || !config.checkContentType(request.contentType)) {
wrapped(request)
} else {
// Get token from header
csrfActionHelper.getTokenToValidate(request).flatMap { headerToken =>
// Get token from query string
csrfActionHelper.getHeaderToken(request)
// Or from body if not found
.orElse({
val form = request.body match {
case body: play.api.mvc.AnyContent if body.asFormUrlEncoded.isDefined => body.asFormUrlEncoded.get
case body: play.api.mvc.AnyContent if body.asMultipartFormData.isDefined => body.asMultipartFormData.get.asFormUrlEncoded
case body: Map[_, _] => body.asInstanceOf[Map[String, Seq[String]]]
case body: play.api.mvc.MultipartFormData[_] => body.asFormUrlEncoded
case _ => Map.empty[String, Seq[String]]
}
form.get(config.tokenName).flatMap(_.headOption)
})
// Execute if it matches
.collect {
case queryToken if tokenProvider.compareTokens(queryToken, headerToken) => wrapped(request)
}
}.getOrElse {
csrfActionHelper.clearTokenIfInvalid(request, errorHandler, "CSRF token check failed")
}
}
}
}
/**
* Wrap an action in a CSRF check.
*/
def apply[A](action: Action[A], errorHandler: ErrorHandler): Action[A] =
new CSRFCheckAction(new TokenProviderProvider(config, tokenSigner).get, errorHandler, action, new CSRFActionHelper(sessionConfiguration, config, tokenSigner))
/**
* Wrap an action in a CSRF check.
*/
def apply[A](action: Action[A]): Action[A] =
new CSRFCheckAction(new TokenProviderProvider(config, tokenSigner).get, CSRF.DefaultErrorHandler, action, new CSRFActionHelper(sessionConfiguration, config, tokenSigner))
}
/**
* CSRF add token action.
*
* Apply this to all actions that render a form that contains a CSRF token.
*/
case class CSRFAddToken @Inject() (config: CSRFConfig, crypto: CSRFTokenSigner, sessionConfiguration: SessionConfiguration) {
private class CSRFAddTokenAction[A](
config: CSRFConfig,
tokenProvider: TokenProvider,
wrapped: Action[A],
csrfActionHelper: CSRFActionHelper
) extends Action[A] {
def parser = wrapped.parser
def executionContext = wrapped.executionContext
def apply(untaggedRequest: Request[A]) = {
val request = csrfActionHelper.tagRequestFromHeader(untaggedRequest)
if (csrfActionHelper.getTokenToValidate(request).isEmpty) {
        // No token for this request, so create a new one
        val newToken = tokenProvider.generateToken
        // Tag the request with the new token
val requestWithNewToken = csrfActionHelper.tagRequest(request, Token(config.tokenName, newToken))
// Once done, add it to the result
import play.core.Execution.Implicits.trampoline
wrapped(requestWithNewToken).map(result =>
csrfActionHelper.addTokenToResponse(newToken, request, result))
} else {
wrapped(request)
}
}
}
/**
* Wrap an action in an action that ensures there is a CSRF token.
*/
def apply[A](action: Action[A]): Action[A] =
new CSRFAddTokenAction(config, new TokenProviderProvider(config, crypto).get, action, new CSRFActionHelper(sessionConfiguration, config, crypto))
}
| aradchykov/playframework | framework/src/play-filters-helpers/src/main/scala/play/filters/csrf/CSRFActions.scala | Scala | apache-2.0 | 22,445 |
package epam.idobrovolskiy.wikipedia.trending.time
import java.time.Month
import org.scalatest.FunSuite
/**
* Created by Igor_Dobrovolskiy on 02.08.2017.
*/
class WikiDateTest extends FunSuite {
test("Test WikiDate after Ser-De results in source value") {
val wd = WikiDate.AD(123, Month.AUGUST, 31)
assertResult(wd) {
import java.io._
// Serialize
val bo = new ByteArrayOutputStream
val o = new ObjectOutputStream(bo)
o.writeObject(wd)
val bytes = bo.toByteArray
// Deserialize
val bi = new ByteArrayInputStream(bytes)
val i = new ObjectInputStream(bi)
val t = i.readObject.asInstanceOf[WikiDate]
t
}
    // Although the test passes, t holds incorrect representation field values (year, month, day,
    // etc.). This test should be updated once the Ser-De issue in the WikiDate class is fixed.
}
test("NoDate equals NoDate") {
assertResult(WikiDate.NoDate) {
WikiDate.NoDate
}
}
test("AD(1, January, 1) not equals NoDate") {
assert(WikiDate.AD(1, Month.JANUARY, 1) != WikiDate.NoDate)
}
test("BC(1, December, 31) not equals NoDate") {
assert(WikiDate.BC(1, Month.DECEMBER, 31) != WikiDate.NoDate)
}
test("AD(1, January, 1).serialize not equals NoDate.serialize") {
assert(WikiDate.AD(1, Month.JANUARY, 1).serialize != WikiDate.NoDate.serialize)
}
test("BC(1, December, 31).serialize not equals NoDate.serialize") {
assert(WikiDate.BC(1, Month.DECEMBER, 31).serialize != WikiDate.NoDate.serialize)
}
test("<entrypoint for fast runtime runs/checks (not a test)>") {
val date = WikiDate.deserialize(2233824) // 2129364 //2567655
// println(date)
// println(WikiDate.NoDate.toClosestTraditionalDate, WikiDate.AD(1).toClosestTraditionalDate, WikiDate.BC(1, Month.DECEMBER, 31).toClosestTraditionalDate)
// println(WikiDate.BC(1, Month.DECEMBER, 31).serialize, WikiDate.NoDate.serialize, WikiDate.AD(1).serialize)
println(WikiDate.BC(50).serialize, WikiDate.AD(50).serialize)
}
}
| igor-dobrovolskiy-epam/wikipedia-analysis-scala-core | src/test/scala/epam/idobrovolskiy/wikipedia/trending/time/WikiDateTest.scala | Scala | apache-2.0 | 2,033 |
package models
import com.novus.salat._
import com.novus.salat.dao.{ SalatDAO, ModelCompanion }
import com.novus.salat.annotations._
import com.mongodb.casbah.Imports._
import mongoContext._
import se.radley.plugin.salat._
import se.radley.plugin.salat.Binders._
import play.api.Play.current
// There are three statuses: open, finished, and cancelled; finished and cancelled both count as closed.
case class OrderControl(
@Key("_id") id: String,
rId: String,
status: String,
endTime: String,
date: String
)
object OrderControl extends ModelCompanion[OrderControl, ObjectId] {
val collection = mongoCollection("OrderControl")
val dao = new SalatDAO[OrderControl, ObjectId](collection = collection) {}
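  // Hedged usage sketch (assumes a running Mongo connection via the Salat plugin; insert and
  // findOne come from ModelCompanion):
  //
  //   val order = OrderControl(id = "o-1", rId = "r-9", status = "open",
  //     endTime = "18:00", date = "2014-05-01")
  //   OrderControl.insert(order)
  //   OrderControl.findOne(MongoDBObject("status" -> "open")) // Option[OrderControl]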
} | kandole/simple_reservation | app/models/OrderControl.scala | Scala | gpl-2.0 | 698 |
package mimir.data
import mimir.algebra.ID
object FileFormat {
type T = ID
val CSV = ID("csv")
val JSON = ID("json")
val XML = ID("com.databricks.spark.xml")
val EXCEL = ID("com.crealytics.spark.excel")
val JDBC = ID("jdbc")
val TEXT = ID("text")
val PARQUET = ID("parquet")
val PDF = ID("mimir.exec.spark.datasource.pdf")
val ORC = ID("orc")
val GOOGLE_SHEETS = ID("mimir.exec.spark.datasource.google.spreadsheet")
val CSV_WITH_ERRORCHECKING = ID("org.apache.spark.sql.execution.datasources.ubodin.csv")
}
| UBOdin/mimir | src/main/scala/mimir/data/FileFormat.scala | Scala | apache-2.0 | 705 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.expressions
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.analysis.TypeCheckResult
import org.apache.spark.sql.catalyst.expressions.codegen._
import org.apache.spark.sql.catalyst.util.TypeUtils
import org.apache.spark.sql.types._
import org.apache.spark.unsafe.types.CalendarInterval
@ExpressionDescription(
usage = "_FUNC_(expr) - Returns the negated value of `expr`.",
examples = """
Examples:
> SELECT _FUNC_(1);
-1
""")
case class UnaryMinus(child: Expression) extends UnaryExpression
with ExpectsInputTypes with NullIntolerant {
override def inputTypes: Seq[AbstractDataType] = Seq(TypeCollection.NumericAndInterval)
override def dataType: DataType = child.dataType
override def toString: String = s"-$child"
private lazy val numeric = TypeUtils.getNumeric(dataType)
override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = dataType match {
case _: DecimalType => defineCodeGen(ctx, ev, c => s"$c.unary_$$minus()")
case dt: NumericType => nullSafeCodeGen(ctx, ev, eval => {
val originValue = ctx.freshName("origin")
// codegen would fail to compile if we just write (-($c))
// for example, we could not write --9223372036854775808L in code
s"""
${CodeGenerator.javaType(dt)} $originValue = (${CodeGenerator.javaType(dt)})($eval);
${ev.value} = (${CodeGenerator.javaType(dt)})(-($originValue));
"""})
case _: CalendarIntervalType => defineCodeGen(ctx, ev, c => s"$c.negate()")
}
protected override def nullSafeEval(input: Any): Any = {
if (dataType.isInstanceOf[CalendarIntervalType]) {
input.asInstanceOf[CalendarInterval].negate()
} else {
numeric.negate(input)
}
}
override def sql: String = s"(- ${child.sql})"
}
@ExpressionDescription(
usage = "_FUNC_(expr) - Returns the value of `expr`.")
case class UnaryPositive(child: Expression)
extends UnaryExpression with ExpectsInputTypes with NullIntolerant {
override def prettyName: String = "positive"
override def inputTypes: Seq[AbstractDataType] = Seq(TypeCollection.NumericAndInterval)
override def dataType: DataType = child.dataType
override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode =
defineCodeGen(ctx, ev, c => c)
protected override def nullSafeEval(input: Any): Any = input
override def sql: String = s"(+ ${child.sql})"
}
/**
* A function that get the absolute value of the numeric value.
*/
@ExpressionDescription(
usage = "_FUNC_(expr) - Returns the absolute value of the numeric value.",
examples = """
Examples:
> SELECT _FUNC_(-1);
1
""")
case class Abs(child: Expression)
extends UnaryExpression with ExpectsInputTypes with NullIntolerant {
override def inputTypes: Seq[AbstractDataType] = Seq(NumericType)
override def dataType: DataType = child.dataType
private lazy val numeric = TypeUtils.getNumeric(dataType)
override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = dataType match {
case _: DecimalType =>
defineCodeGen(ctx, ev, c => s"$c.abs()")
case dt: NumericType =>
defineCodeGen(ctx, ev, c => s"(${CodeGenerator.javaType(dt)})(java.lang.Math.abs($c))")
}
protected override def nullSafeEval(input: Any): Any = numeric.abs(input)
}
abstract class BinaryArithmetic extends BinaryOperator with NullIntolerant {
override def dataType: DataType = left.dataType
override lazy val resolved: Boolean = childrenResolved && checkInputDataTypes().isSuccess
/** Name of the function for this expression on a [[Decimal]] type. */
def decimalMethod: String =
sys.error("BinaryArithmetics must override either decimalMethod or genCode")
/** Name of the function for this expression on a [[CalendarInterval]] type. */
def calendarIntervalMethod: String =
sys.error("BinaryArithmetics must override either calendarIntervalMethod or genCode")
override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = dataType match {
case _: DecimalType =>
defineCodeGen(ctx, ev, (eval1, eval2) => s"$eval1.$decimalMethod($eval2)")
case CalendarIntervalType =>
defineCodeGen(ctx, ev, (eval1, eval2) => s"$eval1.$calendarIntervalMethod($eval2)")
    // byte and short are cast to int for add, subtract, multiply and divide
case ByteType | ShortType =>
defineCodeGen(ctx, ev,
(eval1, eval2) => s"(${CodeGenerator.javaType(dataType)})($eval1 $symbol $eval2)")
case _ =>
defineCodeGen(ctx, ev, (eval1, eval2) => s"$eval1 $symbol $eval2")
}
}
object BinaryArithmetic {
def unapply(e: BinaryArithmetic): Option[(Expression, Expression)] = Some((e.left, e.right))
}
@ExpressionDescription(
usage = "expr1 _FUNC_ expr2 - Returns `expr1`+`expr2`.",
examples = """
Examples:
> SELECT 1 _FUNC_ 2;
3
""")
case class Add(left: Expression, right: Expression) extends BinaryArithmetic {
override def inputType: AbstractDataType = TypeCollection.NumericAndInterval
override def symbol: String = "+"
override def decimalMethod: String = "$plus"
override def calendarIntervalMethod: String = "add"
private lazy val numeric = TypeUtils.getNumeric(dataType)
protected override def nullSafeEval(input1: Any, input2: Any): Any = {
if (dataType.isInstanceOf[CalendarIntervalType]) {
input1.asInstanceOf[CalendarInterval].add(input2.asInstanceOf[CalendarInterval])
} else {
numeric.plus(input1, input2)
}
}
}
@ExpressionDescription(
usage = "expr1 _FUNC_ expr2 - Returns `expr1`-`expr2`.",
examples = """
Examples:
> SELECT 2 _FUNC_ 1;
1
""")
case class Subtract(left: Expression, right: Expression) extends BinaryArithmetic {
override def inputType: AbstractDataType = TypeCollection.NumericAndInterval
override def symbol: String = "-"
override def decimalMethod: String = "$minus"
override def calendarIntervalMethod: String = "subtract"
private lazy val numeric = TypeUtils.getNumeric(dataType)
protected override def nullSafeEval(input1: Any, input2: Any): Any = {
if (dataType.isInstanceOf[CalendarIntervalType]) {
input1.asInstanceOf[CalendarInterval].subtract(input2.asInstanceOf[CalendarInterval])
} else {
numeric.minus(input1, input2)
}
}
}
@ExpressionDescription(
usage = "expr1 _FUNC_ expr2 - Returns `expr1`*`expr2`.",
examples = """
Examples:
> SELECT 2 _FUNC_ 3;
6
""")
case class Multiply(left: Expression, right: Expression) extends BinaryArithmetic {
override def inputType: AbstractDataType = NumericType
override def symbol: String = "*"
override def decimalMethod: String = "$times"
private lazy val numeric = TypeUtils.getNumeric(dataType)
protected override def nullSafeEval(input1: Any, input2: Any): Any = numeric.times(input1, input2)
}
// scalastyle:off line.size.limit
@ExpressionDescription(
usage = "expr1 _FUNC_ expr2 - Returns `expr1`/`expr2`. It always performs floating point division.",
examples = """
Examples:
> SELECT 3 _FUNC_ 2;
1.5
> SELECT 2L _FUNC_ 2L;
1.0
""")
// scalastyle:on line.size.limit
case class Divide(left: Expression, right: Expression) extends BinaryArithmetic {
override def inputType: AbstractDataType = TypeCollection(DoubleType, DecimalType)
override def symbol: String = "/"
override def decimalMethod: String = "$div"
override def nullable: Boolean = true
private lazy val div: (Any, Any) => Any = dataType match {
case ft: FractionalType => ft.fractional.asInstanceOf[Fractional[Any]].div
}
override def eval(input: InternalRow): Any = {
val input2 = right.eval(input)
if (input2 == null || input2 == 0) {
null
} else {
val input1 = left.eval(input)
if (input1 == null) {
null
} else {
div(input1, input2)
}
}
}
/**
* Special case handling due to division by 0 => null.
*/
override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
val eval1 = left.genCode(ctx)
val eval2 = right.genCode(ctx)
val isZero = if (dataType.isInstanceOf[DecimalType]) {
s"${eval2.value}.isZero()"
} else {
s"${eval2.value} == 0"
}
val javaType = CodeGenerator.javaType(dataType)
val divide = if (dataType.isInstanceOf[DecimalType]) {
s"${eval1.value}.$decimalMethod(${eval2.value})"
} else {
s"($javaType)(${eval1.value} $symbol ${eval2.value})"
}
if (!left.nullable && !right.nullable) {
ev.copy(code = s"""
${eval2.code}
boolean ${ev.isNull} = false;
$javaType ${ev.value} = ${CodeGenerator.defaultValue(dataType)};
if ($isZero) {
${ev.isNull} = true;
} else {
${eval1.code}
${ev.value} = $divide;
}""")
} else {
ev.copy(code = s"""
${eval2.code}
boolean ${ev.isNull} = false;
$javaType ${ev.value} = ${CodeGenerator.defaultValue(dataType)};
if (${eval2.isNull} || $isZero) {
${ev.isNull} = true;
} else {
${eval1.code}
if (${eval1.isNull}) {
${ev.isNull} = true;
} else {
${ev.value} = $divide;
}
}""")
}
}
}
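// Hedged evaluation sketch for the null-on-zero semantics above (EmptyRow and Literal are
// catalyst helpers; values are illustrative):
//
//   Divide(Literal(3.0), Literal(2.0)).eval(EmptyRow) // 1.5
//   Divide(Literal(3.0), Literal(0.0)).eval(EmptyRow) // null, no exception is thrown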
@ExpressionDescription(
usage = "expr1 _FUNC_ expr2 - Returns the remainder after `expr1`/`expr2`.",
examples = """
Examples:
> SELECT 2 _FUNC_ 1.8;
0.2
> SELECT MOD(2, 1.8);
0.2
""")
case class Remainder(left: Expression, right: Expression) extends BinaryArithmetic {
override def inputType: AbstractDataType = NumericType
override def symbol: String = "%"
override def decimalMethod: String = "remainder"
override def nullable: Boolean = true
private lazy val integral = dataType match {
case i: IntegralType => i.integral.asInstanceOf[Integral[Any]]
case i: FractionalType => i.asIntegral.asInstanceOf[Integral[Any]]
}
override def eval(input: InternalRow): Any = {
val input2 = right.eval(input)
if (input2 == null || input2 == 0) {
null
} else {
val input1 = left.eval(input)
if (input1 == null) {
null
} else {
input1 match {
case d: Double => d % input2.asInstanceOf[java.lang.Double]
case f: Float => f % input2.asInstanceOf[java.lang.Float]
case _ => integral.rem(input1, input2)
}
}
}
}
/**
* Special case handling for x % 0 ==> null.
*/
override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
val eval1 = left.genCode(ctx)
val eval2 = right.genCode(ctx)
val isZero = if (dataType.isInstanceOf[DecimalType]) {
s"${eval2.value}.isZero()"
} else {
s"${eval2.value} == 0"
}
val javaType = CodeGenerator.javaType(dataType)
val remainder = if (dataType.isInstanceOf[DecimalType]) {
s"${eval1.value}.$decimalMethod(${eval2.value})"
} else {
s"($javaType)(${eval1.value} $symbol ${eval2.value})"
}
if (!left.nullable && !right.nullable) {
ev.copy(code = s"""
${eval2.code}
boolean ${ev.isNull} = false;
$javaType ${ev.value} = ${CodeGenerator.defaultValue(dataType)};
if ($isZero) {
${ev.isNull} = true;
} else {
${eval1.code}
${ev.value} = $remainder;
}""")
} else {
ev.copy(code = s"""
${eval2.code}
boolean ${ev.isNull} = false;
$javaType ${ev.value} = ${CodeGenerator.defaultValue(dataType)};
if (${eval2.isNull} || $isZero) {
${ev.isNull} = true;
} else {
${eval1.code}
if (${eval1.isNull}) {
${ev.isNull} = true;
} else {
${ev.value} = $remainder;
}
}""")
}
}
}
@ExpressionDescription(
usage = "_FUNC_(expr1, expr2) - Returns the positive value of `expr1` mod `expr2`.",
examples = """
Examples:
> SELECT _FUNC_(10, 3);
1
> SELECT _FUNC_(-10, 3);
2
""")
case class Pmod(left: Expression, right: Expression) extends BinaryArithmetic {
override def toString: String = s"pmod($left, $right)"
override def symbol: String = "pmod"
protected def checkTypesInternal(t: DataType): TypeCheckResult =
TypeUtils.checkForNumericExpr(t, "pmod")
override def inputType: AbstractDataType = NumericType
override def nullable: Boolean = true
override def eval(input: InternalRow): Any = {
val input2 = right.eval(input)
if (input2 == null || input2 == 0) {
null
} else {
val input1 = left.eval(input)
if (input1 == null) {
null
} else {
input1 match {
case i: Integer => pmod(i, input2.asInstanceOf[java.lang.Integer])
case l: Long => pmod(l, input2.asInstanceOf[java.lang.Long])
case s: Short => pmod(s, input2.asInstanceOf[java.lang.Short])
case b: Byte => pmod(b, input2.asInstanceOf[java.lang.Byte])
case f: Float => pmod(f, input2.asInstanceOf[java.lang.Float])
case d: Double => pmod(d, input2.asInstanceOf[java.lang.Double])
case d: Decimal => pmod(d, input2.asInstanceOf[Decimal])
}
}
}
}
override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
val eval1 = left.genCode(ctx)
val eval2 = right.genCode(ctx)
val isZero = if (dataType.isInstanceOf[DecimalType]) {
s"${eval2.value}.isZero()"
} else {
s"${eval2.value} == 0"
}
val remainder = ctx.freshName("remainder")
val javaType = CodeGenerator.javaType(dataType)
val result = dataType match {
case DecimalType.Fixed(_, _) =>
val decimalAdd = "$plus"
s"""
$javaType $remainder = ${eval1.value}.remainder(${eval2.value});
if ($remainder.compare(new org.apache.spark.sql.types.Decimal().set(0)) < 0) {
${ev.value}=($remainder.$decimalAdd(${eval2.value})).remainder(${eval2.value});
} else {
${ev.value}=$remainder;
}
"""
      // byte and short are cast to int for add, subtract, multiply and divide
case ByteType | ShortType =>
s"""
$javaType $remainder = ($javaType)(${eval1.value} % ${eval2.value});
if ($remainder < 0) {
${ev.value}=($javaType)(($remainder + ${eval2.value}) % ${eval2.value});
} else {
${ev.value}=$remainder;
}
"""
case _ =>
s"""
$javaType $remainder = ${eval1.value} % ${eval2.value};
if ($remainder < 0) {
${ev.value}=($remainder + ${eval2.value}) % ${eval2.value};
} else {
${ev.value}=$remainder;
}
"""
}
if (!left.nullable && !right.nullable) {
ev.copy(code = s"""
${eval2.code}
boolean ${ev.isNull} = false;
$javaType ${ev.value} = ${CodeGenerator.defaultValue(dataType)};
if ($isZero) {
${ev.isNull} = true;
} else {
${eval1.code}
$result
}""")
} else {
ev.copy(code = s"""
${eval2.code}
boolean ${ev.isNull} = false;
$javaType ${ev.value} = ${CodeGenerator.defaultValue(dataType)};
if (${eval2.isNull} || $isZero) {
${ev.isNull} = true;
} else {
${eval1.code}
if (${eval1.isNull}) {
${ev.isNull} = true;
} else {
$result
}
}""")
}
}
private def pmod(a: Int, n: Int): Int = {
val r = a % n
if (r < 0) {(r + n) % n} else r
}
private def pmod(a: Long, n: Long): Long = {
val r = a % n
if (r < 0) {(r + n) % n} else r
}
private def pmod(a: Byte, n: Byte): Byte = {
val r = a % n
if (r < 0) {((r + n) % n).toByte} else r.toByte
}
private def pmod(a: Double, n: Double): Double = {
val r = a % n
if (r < 0) {(r + n) % n} else r
}
private def pmod(a: Short, n: Short): Short = {
val r = a % n
if (r < 0) {((r + n) % n).toShort} else r.toShort
}
private def pmod(a: Float, n: Float): Float = {
val r = a % n
if (r < 0) {(r + n) % n} else r
}
private def pmod(a: Decimal, n: Decimal): Decimal = {
val r = a % n
if (r != null && r.compare(Decimal.ZERO) < 0) {(r + n) % n} else r
}
override def sql: String = s"$prettyName(${left.sql}, ${right.sql})"
}
/**
* A function that returns the least value of all parameters, skipping null values.
* It takes at least 2 parameters, and returns null iff all parameters are null.
*/
@ExpressionDescription(
usage = "_FUNC_(expr, ...) - Returns the least value of all parameters, skipping null values.",
examples = """
Examples:
> SELECT _FUNC_(10, 9, 2, 4, 3);
2
""")
case class Least(children: Seq[Expression]) extends Expression {
override def nullable: Boolean = children.forall(_.nullable)
override def foldable: Boolean = children.forall(_.foldable)
private lazy val ordering = TypeUtils.getInterpretedOrdering(dataType)
override def checkInputDataTypes(): TypeCheckResult = {
if (children.length <= 1) {
TypeCheckResult.TypeCheckFailure(
s"input to function $prettyName requires at least two arguments")
} else if (children.map(_.dataType).distinct.count(_ != NullType) > 1) {
TypeCheckResult.TypeCheckFailure(
s"The expressions should all have the same type," +
s" got LEAST(${children.map(_.dataType.simpleString).mkString(", ")}).")
} else {
TypeUtils.checkForOrderingExpr(dataType, s"function $prettyName")
}
}
override def dataType: DataType = children.head.dataType
override def eval(input: InternalRow): Any = {
children.foldLeft[Any](null)((r, c) => {
val evalc = c.eval(input)
if (evalc != null) {
if (r == null || ordering.lt(evalc, r)) evalc else r
} else {
r
}
})
}
override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
val evalChildren = children.map(_.genCode(ctx))
ev.isNull = JavaCode.isNullGlobal(ctx.addMutableState(CodeGenerator.JAVA_BOOLEAN, ev.isNull))
val evals = evalChildren.map(eval =>
s"""
|${eval.code}
|${ctx.reassignIfSmaller(dataType, ev, eval)}
""".stripMargin
)
val resultType = CodeGenerator.javaType(dataType)
val codes = ctx.splitExpressionsWithCurrentInputs(
expressions = evals,
funcName = "least",
extraArguments = Seq(resultType -> ev.value),
returnType = resultType,
makeSplitFunction = body =>
s"""
|$body
|return ${ev.value};
""".stripMargin,
      foldFunctions = _.map(funcCall => s"${ev.value} = $funcCall;").mkString("\n"))
ev.copy(code =
s"""
|${ev.isNull} = true;
|$resultType ${ev.value} = ${CodeGenerator.defaultValue(dataType)};
|$codes
""".stripMargin)
}
}
/**
* A function that returns the greatest value of all parameters, skipping null values.
* It takes at least 2 parameters, and returns null iff all parameters are null.
*/
@ExpressionDescription(
usage = "_FUNC_(expr, ...) - Returns the greatest value of all parameters, skipping null values.",
examples = """
Examples:
> SELECT _FUNC_(10, 9, 2, 4, 3);
10
""")
case class Greatest(children: Seq[Expression]) extends Expression {
override def nullable: Boolean = children.forall(_.nullable)
override def foldable: Boolean = children.forall(_.foldable)
private lazy val ordering = TypeUtils.getInterpretedOrdering(dataType)
override def checkInputDataTypes(): TypeCheckResult = {
if (children.length <= 1) {
TypeCheckResult.TypeCheckFailure(
s"input to function $prettyName requires at least two arguments")
} else if (children.map(_.dataType).distinct.count(_ != NullType) > 1) {
TypeCheckResult.TypeCheckFailure(
s"The expressions should all have the same type," +
s" got GREATEST(${children.map(_.dataType.simpleString).mkString(", ")}).")
} else {
TypeUtils.checkForOrderingExpr(dataType, s"function $prettyName")
}
}
override def dataType: DataType = children.head.dataType
override def eval(input: InternalRow): Any = {
children.foldLeft[Any](null)((r, c) => {
val evalc = c.eval(input)
if (evalc != null) {
if (r == null || ordering.gt(evalc, r)) evalc else r
} else {
r
}
})
}
override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
val evalChildren = children.map(_.genCode(ctx))
ev.isNull = JavaCode.isNullGlobal(ctx.addMutableState(CodeGenerator.JAVA_BOOLEAN, ev.isNull))
val evals = evalChildren.map(eval =>
s"""
|${eval.code}
|${ctx.reassignIfGreater(dataType, ev, eval)}
""".stripMargin
)
val resultType = CodeGenerator.javaType(dataType)
val codes = ctx.splitExpressionsWithCurrentInputs(
expressions = evals,
funcName = "greatest",
extraArguments = Seq(resultType -> ev.value),
returnType = resultType,
makeSplitFunction = body =>
s"""
|$body
|return ${ev.value};
""".stripMargin,
      foldFunctions = _.map(funcCall => s"${ev.value} = $funcCall;").mkString("\n"))
ev.copy(code =
s"""
|${ev.isNull} = true;
|$resultType ${ev.value} = ${CodeGenerator.defaultValue(dataType)};
|$codes
""".stripMargin)
}
}
| szhem/spark | sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala | Scala | apache-2.0 | 22,391 |
package services.history
import java.util.UUID
import com.github.mauricio.async.db.Connection
import models.database.queries.history.RequestLogQueries
import models.history.RequestLog
import play.api.libs.concurrent.Execution.Implicits.defaultContext
import services.database.Database
object RequestHistoryService {
def insert(log: RequestLog) = Database.execute(RequestLogQueries.insert(log)).map(i => log)
def searchRequests(q: String, orderBy: String, page: Int) = for {
count <- Database.query(RequestLogQueries.searchCount(q))
list <- Database.query(RequestLogQueries.search(q, orderBy, Some(page)))
} yield count -> list
def getCountByUser(id: UUID) = Database.query(RequestLogQueries.getRequestCountForUser(id))
def removeRequestsByUser(userId: UUID, conn: Option[Connection]) = Database.execute(RequestLogQueries.RemoveRequestsByUser(userId), conn)
}
| agilemobiledev/boilerplay | app/services/history/RequestHistoryService.scala | Scala | apache-2.0 | 883 |
/**
* <pre>
* scala> (new AndOrSpec).execute()
* AndOrSpec:
* The ScalaTest Matchers DSL
* should provide
* an and operator that
* - returns silently when evaluating true and true
* - throws a TestFailedException when evaluating true and false
* - that throws a TestFailedException when evaluating false and true
* - throws a TestFailedException when evaluating false and false
* an or operator that
* - returns silently when evaluating true or true
* - returns silently when evaluating true or false
* - returns silently when evaluating false or true
* - throws a TestFailedException when evaluating false or false
* </pre>
*/
class SI_4507
| felixmulder/scala | test/scaladoc/resources/SI_4507.scala | Scala | bsd-3-clause | 701 |
/**
* Copyright (C) 2017 Orbeon, Inc.
*
* This program is free software; you can redistribute it and/or modify it under the terms of the
* GNU Lesser General Public License as published by the Free Software Foundation; either version
* 2.1 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Lesser General Public License for more details.
*
* The full text of the license is available at http://www.gnu.org/copyleft/lesser.html
*/
package org.orbeon.oxf.fr.persistence.relational.rest
import java.sql.Connection
import enumeratum._
import org.orbeon.oxf.fr.persistence.relational.{Provider, RelationalUtils}
import org.orbeon.oxf.http.{Headers, HttpStatusCodeException, StatusCode}
import org.orbeon.oxf.pipeline.api.PipelineContext
import org.orbeon.oxf.processor.generator.RequestGenerator
import org.orbeon.oxf.util.CoreUtils._
import org.orbeon.oxf.util.IOUtils.useAndClose
import org.orbeon.oxf.util.NetUtils
import scala.util.Try
trait LockUnlock extends RequestResponse {
import Private._
def lock(req: Request): Unit = {
import LeaseStatus._
val timeout = readTimeoutFromHeader
readLeaseStatus(req) { (connection, leaseStatus, dataPart, reqLockInfo) ⇒
leaseStatus match {
case DoesNotExist ⇒
LockSql.createLease(connection, req.provider, dataPart, reqLockInfo.username, reqLockInfo.groupname, timeout)
case ExistsCanUse ⇒
LockSql.updateLease(connection, req.provider, dataPart, reqLockInfo.username, reqLockInfo.groupname, timeout)
case ExistsCanNotUse(existingLease) ⇒
issueLockedResponse(existingLease)
}
}
}
def unlock(req: Request): Unit = {
import LeaseStatus._
readLeaseStatus(req) { (connection, leaseStatus, dataPart, reqLockInfo) ⇒
leaseStatus match {
case DoesNotExist ⇒
// NOP, we're already good
case ExistsCanUse ⇒
LockSql.removeLease(connection, dataPart)
case ExistsCanNotUse(existingLease) ⇒
issueLockedResponse(existingLease)
}
}
}
private object Private {
sealed trait LeaseStatus extends EnumEntry
object LeaseStatus extends Enum[LeaseStatus] {
val values = findValues
case object ExistsCanUse extends LeaseStatus
case class ExistsCanNotUse(existingLease: LockSql.Lease) extends LeaseStatus
case object DoesNotExist extends LeaseStatus
}
def issueLockedResponse(existingLease: LockSql.Lease): Unit = {
httpResponse.setStatus(StatusCode.Locked)
httpResponse.setHeader(Headers.ContentType, "application/xml")
httpResponse.getOutputStream.pipe(useAndClose(_)(os ⇒
LockInfo.serialize(LockInfo(existingLease.lockInfo.username, existingLease.lockInfo.groupname), os)
))
}
def readTimeoutFromHeader: Int = {
// Header has the form `Timeout: Second-600` header
val prefix = Headers.TimeoutValuePrefix
val request = NetUtils.getExternalContext.getRequest
val headerValue = request.getFirstHeader(Headers.TimeoutLower)
val timeoutString = headerValue.flatMap(hv ⇒ hv.startsWith(prefix).option(hv.substring(prefix.length)))
val timeoutInt = timeoutString.flatMap(ts ⇒ Try(ts.toInt).toOption)
timeoutInt.getOrElse(throw HttpStatusCodeException(StatusCode.BadRequest))
}
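    // Parsing sketch for the header format above (prefix assumed to be "Second-"):
    //
    //   "Timeout: Second-600"   => timeoutString = Some("600") => 600
    //   "Timeout: Infinite"     => prefix mismatch             => 400 Bad Request
    //   "Timeout: Second-abc"   => toInt fails                 => 400 Bad Request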
def readLeaseStatus(
req : Request)(
thunk : (Connection, LeaseStatus, DataPart, LockInfo) ⇒ Unit
): Unit = {
import LeaseStatus._
val bodyInputStream = RequestGenerator.getRequestBody(PipelineContext.get) match {
case Some(bodyURL) ⇒ NetUtils.uriToInputStream(bodyURL)
case None ⇒ NetUtils.getExternalContext.getRequest.getInputStream
}
val reqLockInfo = LockInfo.parse(bodyInputStream)
req.dataPart match {
case None ⇒ throw HttpStatusCodeException(StatusCode.BadRequest)
case Some(dataPart) ⇒
RelationalUtils.withConnection { connection ⇒
def callThunk(leaseStatus: LeaseStatus): Unit =
thunk(connection, leaseStatus, dataPart, reqLockInfo)
Provider.withLockedTable(connection, req.provider, "orbeon_form_data_lease", () ⇒
LockSql.readLease(connection, req.provider, dataPart) match {
case Some(lease) ⇒
val canUseExistingLease =
reqLockInfo.username == lease.lockInfo.username || lease.timeout <= 0
if (canUseExistingLease)
callThunk(ExistsCanUse)
else
callThunk(ExistsCanNotUse(lease))
case None ⇒
callThunk(DoesNotExist)
}
)
}
}
}
}
}
| brunobuzzi/orbeon-forms | form-runner/jvm/src/main/scala/org/orbeon/oxf/fr/persistence/relational/rest/LockUnlock.scala | Scala | lgpl-2.1 | 5,066 |
package okapies.finagle.kafka.protocol
import _root_.kafka.common.ErrorMapping
case class KafkaError(code: Short /* int16 */) {
def throwException() = ErrorMapping.maybeThrowException(code)
override def toString = code match {
case ErrorMapping.UnknownCode => "Unknown"
case ErrorMapping.NoError => "NoError"
case ErrorMapping.OffsetOutOfRangeCode => "OffsetOutOfRange"
case ErrorMapping.InvalidMessageCode => "InvalidMessage"
case ErrorMapping.UnknownTopicOrPartitionCode => "UnknownTopicOrPartition"
case ErrorMapping.InvalidFetchSizeCode => "InvalidFetchSize"
case ErrorMapping.LeaderNotAvailableCode => "LeaderNotAvailable"
case ErrorMapping.NotLeaderForPartitionCode => "NotLeaderForPartition"
case ErrorMapping.RequestTimedOutCode => "RequestTimedOut"
case ErrorMapping.BrokerNotAvailableCode => "BrokerNotAvailable"
case ErrorMapping.ReplicaNotAvailableCode => "ReplicaNotAvailable"
case ErrorMapping.MessageSizeTooLargeCode => "MessageSizeTooLarge"
case ErrorMapping.StaleControllerEpochCode => "StaleControllerEpoch"
case ErrorMapping.OffsetMetadataTooLargeCode => "OffsetMetadataTooLarge"
case ErrorMapping.OffsetsLoadInProgressCode => "OffsetsLoadInProgress"
case ErrorMapping.ConsumerCoordinatorNotAvailableCode => "ConsumerCoordinatorNotAvailable"
case ErrorMapping.NotCoordinatorForConsumerCode => "NotCoordinatorForConsumer"
case _ => super.toString
}
}
object KafkaError {
final val Unknown = KafkaError(ErrorMapping.UnknownCode)
final val NoError = KafkaError(ErrorMapping.NoError)
final val OffsetOutOfRange = KafkaError(ErrorMapping.OffsetOutOfRangeCode)
final val InvalidMessage = KafkaError(ErrorMapping.InvalidMessageCode)
final val UnknownTopicOrPartition = KafkaError(ErrorMapping.UnknownTopicOrPartitionCode)
final val InvalidFetchSize = KafkaError(ErrorMapping.InvalidFetchSizeCode)
final val LeaderNotAvailable = KafkaError(ErrorMapping.LeaderNotAvailableCode)
final val NotLeaderForPartition = KafkaError(ErrorMapping.NotLeaderForPartitionCode)
final val RequestTimedOut = KafkaError(ErrorMapping.RequestTimedOutCode)
final val BrokerNotAvailable = KafkaError(ErrorMapping.BrokerNotAvailableCode)
final val ReplicaNotAvailable = KafkaError(ErrorMapping.ReplicaNotAvailableCode)
final val MessageSizeTooLarge = KafkaError(ErrorMapping.MessageSizeTooLargeCode)
final val StaleControllerEpoch = KafkaError(ErrorMapping.StaleControllerEpochCode)
final val OffsetMetadataTooLarge = KafkaError(ErrorMapping.OffsetMetadataTooLargeCode)
final val OffsetsLoadInProgress = KafkaError(ErrorMapping.OffsetsLoadInProgressCode)
final val ConsumerCoordinatorNotAvailable = KafkaError(ErrorMapping.ConsumerCoordinatorNotAvailableCode)
final val NotCoordinatorForConsumer = KafkaError(ErrorMapping.NotCoordinatorForConsumerCode)
}
class KafkaCodecException(message: String = null, cause: Throwable = null)
extends RuntimeException(message: String, cause: Throwable)
| yonglehou/finagle-kafka | src/main/scala/okapies/finagle/kafka/protocol/KafkaError.scala | Scala | apache-2.0 | 3,016 |
package io.finch.internal
import scala.concurrent.ExecutionContext
object DummyExecutionContext extends ExecutionContext {
def execute(runnable: Runnable): Unit = runnable.run()
def reportFailure(cause: Throwable): Unit = throw new NotImplementedError()
}
| finagle/finch | core/src/main/scala/io/finch/internal/DummyExecutionContext.scala | Scala | apache-2.0 | 262 |
/**
* Illustrates a simple aggregate in scala to compute the average of an RDD
*/
package com.oreilly.learningsparkexamples.scala
import org.apache.spark._
import org.apache.spark.rdd.RDD
object BasicAvg {
def main(args: Array[String]) {
val master = args.length match {
case x: Int if x > 0 => args(0)
case _ => "local"
}
val sc = new SparkContext(master, "BasicAvg", System.getenv("SPARK_HOME"))
val input = sc.parallelize(List(1,2,3,4))
val result = computeAvg(input)
    val avg = result._1 / result._2.toFloat
    println(s"(sum, count) = $result, avg = $avg")
}
def computeAvg(input: RDD[Int]) = {
input.aggregate((0, 0))((x, y) => (x._1 + y, x._2 + 1),
(x,y) => (x._1 + y._1, x._2 + y._2))
}
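  // Hedged walk-through of the aggregate above for input List(1, 2, 3, 4): the sequence op
  // folds each element into a (sum, count) pair, e.g. (0,0) -> (1,1) -> (3,2) -> (6,3) -> (10,4),
  // and the combine op merges per-partition pairs, e.g. (3,2) + (7,2) = (10,4). So
  // computeAvg(sc.parallelize(List(1, 2, 3, 4))) == (10, 4), giving an average of 2.5.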
}
| huydx/learning-spark | src/main/scala/com/oreilly/learningsparkexamples/scala/BasicAvg.scala | Scala | mit | 724 |
package net.ruippeixotog.ebaysniper.util
import akka.http.scaladsl.model.HttpEntity
import akka.http.scaladsl.server.Directive0
import akka.http.scaladsl.server.Directives._
import org.slf4j.LoggerFactory
trait Logging {
lazy val log = LoggerFactory.getLogger(getClass.getName)
}
trait RoutingLogging extends Logging {
val logServiceRequest: Directive0 = extractRequest.map { req =>
val entityData = req.entity match {
case HttpEntity.Strict(_, data) => data.decodeString("UTF-8")
case _ => "<non-strict>"
}
log.info("{} {} {}", req.method.value, req.uri.path, entityData)
()
}
}
| ruippeixotog/ebay-snipe-server | src/main/scala/net/ruippeixotog/ebaysniper/util/Logging.scala | Scala | mit | 618 |
/*
This file is part of Intake24.
Copyright 2015, 2016 Newcastle University.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package uk.ac.ncl.openlab.intake24.services.dataexport.guice
import com.amazonaws.services.s3.{AmazonS3, AmazonS3ClientBuilder}
import com.google.inject.{AbstractModule, Provides}
import javax.inject.Singleton
import play.api.Configuration
class AmazonWebServicesModule extends AbstractModule {
override def configure() = {
}
@Provides
@Singleton
def createS3client(config: Configuration): AmazonS3 = {
AmazonS3ClientBuilder.defaultClient()
}
}
| digitalinteraction/intake24 | DataExportService/app/uk/ac/ncl/openlab/intake24/services/dataexport/guice/AmazonWebServicesModule.scala | Scala | apache-2.0 | 1,074 |
package se.gigurra.leavu3.mfd
import com.badlogic.gdx.graphics.Color
import com.google.common.base.Splitter
import se.gigurra.leavu3.datamodel.{Configuration, CounterMeasures, DlinkData, EngineIndicators, GameData, Payload, Vec2, self}
import se.gigurra.leavu3.gfx.{Blink, CountDown}
import se.gigurra.leavu3.gfx.RenderContext._
import se.gigurra.leavu3.interfaces.{DcsRemote, GameIn}
import scala.collection.JavaConversions._
/**
* Created by kjolh on 3/12/2016.
*/
case class SmsPage(implicit config: Configuration, mfd: MfdIfc) extends Page("SMS", Page.Priorities.SMS) {
implicit val p = screenProjection
val yTitle = 0.85
val top = yTitle - 0.15
val bottom = -0.5
val height = top - bottom
def draw(game: GameData, dlinkIn: Seq[(String, DlinkData)]): Unit = {
at((0.0, yTitle)) {
"STORES".drawCentered(WHITE, 1.5f)
}
drawBoxPayload(game.payload)
drawTextPayload(game.payload, game.electronicWarf.counterMeasures, game.indicators.engines)
}
def drawBoxPayload(payload: Payload): Unit = {
val typ2Station =
payload.stations.zipWithIndex
.groupBy(_._1.typ).toSeq
.sortBy(_._1.fullName)
.filterNot(_._1.fullName.contains("unknown"))
val nTypes = typ2Station.size
val lineHeight = height / math.max(1.0, nTypes.toDouble)
val padding = 0.25 * lineHeight
val boxHeight = lineHeight - padding
val boxWidth = 0.8
val xCtr = -0.4
val iSelected = payload.currentStation - 1
var i = 0
for ((typ, seqStationAndPylonIndex) <- typ2Station) {
val pylonIndices = seqStationAndPylonIndex.map(_._2)
val isSelected = pylonIndices.contains(iSelected)
val head = seqStationAndPylonIndex.head._1
val name = head.typ.fullName.take(15)
val count = seqStationAndPylonIndex.map(_._1.count).sum
val yCtr = top - i.toDouble * lineHeight - lineHeight / 2.0
val color = if (isSelected) WHITE else if (count > 0) GREEN else GRAY
at((xCtr, yCtr)) {
rect(width = boxWidth, height = boxHeight, typ = LINE, color = color)
val text = s" ${count.toString.pad(3)}x $name".padRight(25)
text.drawCentered(color)
}
i += 1
}
}
val blinkCountdownTime = 4.0
def newCountdown(): CountDown = CountDown(blinkCountdownTime)
val blink = Blink(Seq(true, false), 0.5)
var gunBlinkCountdown = CountDown(0.0)
var chaffBlinkCountdown = CountDown(0.0)
var flareBlinkCountdown = CountDown(0.0)
var gunLastCycle = 0
var chaffLastCycle = 0
var flareLastCycle = 0
def getRightSideTextColor(count: Int, countDown: CountDown): Color = {
countDown.isReached match {
case true => if (count == 0) RED else GREEN
case false => if (blink) YELLOW else if (count == 0) RED else GREEN
}
}
def bingo = DcsRemote.remoteConfig.missionSettings.bingo
def joker = DcsRemote.remoteConfig.missionSettings.joker
def getFuelTimeStringUntil(current: Double, target: Double): (String, Color) = {
val playtimeTotalSeconds = math.max(0.0, (current - target) / GameIn.estimatedFueldConsumption)
val playtimeTotalMinutes = (playtimeTotalSeconds / 60.0).round
val playtimeHours = (playtimeTotalSeconds / 3600.0).floor.round
val playtimeMinutes = playtimeTotalMinutes - playtimeHours * 60L
val playtimeColor = if (playtimeTotalMinutes > 10) GREEN else if (playtimeTotalMinutes > 0) YELLOW else RED
(s"${playtimeHours.pad(2, '0')}:${playtimeMinutes.pad(2, '0')}", playtimeColor)
}
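  // Worked example for the conversion above (hedged; consumption is assumed to be in kg/s):
  // with estimatedFueldConsumption = 0.5 and 3900 kg above the target,
  // playtimeTotalSeconds = 7800, hours = floor(7800 / 3600) = 2, minutes = 130 - 2 * 60 = 10,
  // so the result is ("02:10", GREEN) since 130 minutes exceeds the 10-minute warning threshold.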
def drawTextPayload(payload: Payload, cms: CounterMeasures, eng: EngineIndicators): Unit = {
val scale = config.symbolScale * 0.03 / font.getSpaceWidth
batched {
at((0.5, top - font.lineHeight * scale)) {
transform(_.scalexy(scale)) {
val leftPad = 8
val rightPad = 22
var n = 0
def nextLine(): Unit = {
n += 1
}
def drawTextLine(left: Any, right: String, color: Color): Unit = {
val str = s"${left.toString.pad(leftPad)}x $right".padRight(rightPad)
transform(_.translate(y = -n.toFloat * font.getLineHeight))(str.drawRaw(color = color))
nextLine()
}
def drawFuel(name: String, value: Double, bingo: Double, joker: Double, rounding: Long = 10): Unit = {
val m1 = 1.0 / rounding.toDouble
val m2 = rounding
val fuelNumber = Splitter.fixedLength(3).split(((value * kg_to_fuelUnit * m1).round*m2).toString.reverse).mkString(".").reverse
val fuelColor = if (value < bingo) RED else if (value < joker) YELLOW else GREEN
drawTextLine(fuelNumber, name, fuelColor)
}
drawTextLine(payload.cannon.shells, "Gun", getRightSideTextColor(payload.cannon.shells, gunBlinkCountdown))
nextLine()
drawTextLine(cms.chaff, "Chaff", getRightSideTextColor(cms.chaff, chaffBlinkCountdown))
drawTextLine(cms.flare, "Flare", getRightSideTextColor(cms.flare, flareBlinkCountdown))
nextLine()
drawFuel("Fuel", eng.fuelTotal, bingo = bingo * lbs_to_kg, joker = joker * lbs_to_kg)
drawFuel("internal", eng.fuelInternal, bingo = bingo * lbs_to_kg, joker = joker * lbs_to_kg)
drawFuel("external", eng.fuelExternal, bingo = 1, joker = 0)
nextLine()
drawFuel("Flow", GameIn.estimatedFueldConsumption * 3600.0, bingo = 100, joker = 1000, rounding = 100)
def drawFuelFlow(target: Double, name: String): Unit = {
val flow = getFuelTimeStringUntil(eng.fuelTotal, target)
drawTextLine(flow._1, s"T->$name", flow._2)
}
if (GameIn.estimatedFueldConsumption > 0.001) {
drawFuelFlow(joker * lbs_to_kg, "Joker")
drawFuelFlow(bingo * lbs_to_kg, "Bingo")
drawFuelFlow(0, "Empty")
}
nextLine()
drawFuel("lbs=Joker", joker * lbs_to_kg, -1, -1)
drawFuel("lbs=Bingo", bingo * lbs_to_kg, -1, -1)
if (payload.cannon.shells < gunLastCycle)
gunBlinkCountdown = newCountdown()
if (cms.chaff < chaffLastCycle)
chaffBlinkCountdown = newCountdown()
if (cms.flare < flareLastCycle)
flareBlinkCountdown = newCountdown()
gunLastCycle = payload.cannon.shells
chaffLastCycle = cms.chaff
flareLastCycle = cms.flare
}
}
}
}
}
| GiGurra/leavu3 | src/main/scala/se/gigurra/leavu3/mfd/SmsPage.scala | Scala | mit | 6,628 |
package org.broadinstitute.dsde.workbench.test.api
import java.util.UUID
import akka.actor.ActorSystem
import akka.http.scaladsl.model.HttpMethods.GET
import akka.http.scaladsl.model.{HttpRequest, StatusCodes}
import akka.http.scaladsl.unmarshalling.Unmarshal
import akka.testkit.TestKitBase
import org.broadinstitute.dsde.workbench.auth.{AuthToken, AuthTokenScopes, ServiceAccountAuthTokenFromJson, ServiceAccountAuthTokenFromPem}
import org.broadinstitute.dsde.workbench.config.{Credentials, UserPool}
import org.broadinstitute.dsde.workbench.dao.Google.{googleDirectoryDAO, googleIamDAO}
import org.broadinstitute.dsde.workbench.fixture.BillingFixtures
import org.broadinstitute.dsde.workbench.model._
import org.broadinstitute.dsde.workbench.model.google.{GoogleProject, ServiceAccountName}
import org.broadinstitute.dsde.workbench.service.Sam.sendRequest
import org.broadinstitute.dsde.workbench.service.SamModel._
import org.broadinstitute.dsde.workbench.service.test.CleanUp
import org.broadinstitute.dsde.workbench.service.{Orchestration, Sam, Thurloe, _}
import org.broadinstitute.dsde.workbench.test.SamConfig
import org.scalatest.concurrent.{Eventually, ScalaFutures}
import org.scalatest.time.{Seconds, Span}
import org.broadinstitute.dsde.workbench.service.util.Tags
import scala.concurrent.Await
import scala.concurrent.duration.{Duration, _}
import org.scalatest.freespec.AnyFreeSpec
import org.scalatest.matchers.should.Matchers
class SamApiSpec extends AnyFreeSpec with BillingFixtures with Matchers with ScalaFutures with CleanUp with Eventually with TestKitBase {
implicit override val patienceConfig: PatienceConfig = PatienceConfig(timeout = scaled(Span(5, Seconds)))
implicit lazy val system = ActorSystem()
val gcsConfig = SamConfig.GCS
def registerAsNewUser(email: WorkbenchEmail)(implicit authToken: AuthToken): Unit = {
val newUserProfile = Orchestration.profile.BasicProfile (
firstName = "Generic",
lastName = "Testerson",
title = "User",
contactEmail = Option(email.value),
institute = "Broad",
institutionalProgram = "DSP",
programLocationCity = "Cambridge",
programLocationState = "MA",
programLocationCountry = "USA",
pi = "Albus Dumbledore",
nonProfitStatus = "true"
)
Orchestration.profile.registerUser(newUserProfile)
}
def removeUser(subjectId: String): Unit = {
implicit val token: AuthToken = UserPool.chooseAdmin.makeAuthToken()
if (Sam.admin.doesUserExist(subjectId).getOrElse(false)) {
Sam.admin.deleteUser(subjectId)
}
Thurloe.keyValuePairs.deleteAll(subjectId)
}
"Sam test utilities" - {
"should be idempotent for removal of user's registration" in {
// use a temp user because they should not be registered. Remove them after!
val tempUser: Credentials = UserPool.chooseTemp
val tempAuthToken: AuthToken = tempUser.makeAuthToken()
// Register user if the user is not registered
val tempUserInfo = Sam.user.status()(tempAuthToken) match {
case Some(user) => {
logger.info(s"User ${user.userInfo.userEmail} was already registered.")
user.userInfo
}
case None => {
logger.info (s"User ${tempUser.email} does not yet exist! Registering user.")
Sam.user.registerSelf()(tempAuthToken)
Sam.user.status()(tempAuthToken).get.userInfo
}
}
tempUserInfo.userEmail shouldBe tempUser.email
// Remove user
removeUser(tempUserInfo.userSubjectId)
Sam.user.status()(tempAuthToken) shouldBe None
// OK to re-remove
removeUser(tempUserInfo.userSubjectId)
Sam.user.status()(tempAuthToken) shouldBe None
}
}
"Sam" - {
"should return terms of services with auth token" in {
val anyUser: Credentials = UserPool.chooseAnyUser
val userAuthToken: AuthToken = anyUser.makeAuthToken()
val response = Sam.getRequest(Sam.url + s"tos/text")(userAuthToken)
val textFuture = Unmarshal(response.entity).to[String]
response.status shouldEqual StatusCodes.OK
whenReady(textFuture) { text =>
text.isEmpty shouldBe false
}
}
"should return terms of services with no auth token" in {
val req = HttpRequest(GET, Sam.url + s"tos/text")
val response = sendRequest(req)
val textFuture = Unmarshal(response.entity).to[String]
response.status shouldEqual StatusCodes.OK
whenReady(textFuture) { text =>
text.isEmpty shouldBe false
}
}
"should give pets the same access as their owners" in {
val anyUser: Credentials = UserPool.chooseAnyUser
val userAuthToken: AuthToken = anyUser.makeAuthToken()
val owner: Credentials = UserPool.chooseProjectOwner
// set auth tokens explicitly to control which credentials are used
val userStatus = Sam.user.status()(userAuthToken).get
      // use a brand new billing project to ensure a known state for the pet (not present)
withBrandNewBillingProject("new-pet-test") { projectName =>
val petAccountEmail = Sam.user.petServiceAccountEmail(projectName)(userAuthToken)
petAccountEmail.value should not be userStatus.userInfo.userEmail
googleIamDAO.findServiceAccount(GoogleProject(projectName), petAccountEmail).futureValue.map(_.email) shouldBe Some(petAccountEmail)
        // The first call should create the pet; confirm that a second call to create/retrieve gives the same result
Sam.user.petServiceAccountEmail(projectName)(userAuthToken) shouldBe petAccountEmail
val petAuthToken = ServiceAccountAuthTokenFromJson(Sam.user.petServiceAccountKey(projectName)(userAuthToken))
Sam.user.status()(petAuthToken) shouldBe Some(userStatus)
// who is my pet -> who is my user's pet -> it's me
Sam.user.petServiceAccountEmail(projectName)(petAuthToken) shouldBe petAccountEmail
}(owner.makeAuthToken(AuthTokenScopes.billingScopes))
}
"should not treat non-pet service accounts as pets" in {
val saEmail = WorkbenchEmail(gcsConfig.qaEmail)
implicit val saAuthToken = ServiceAccountAuthTokenFromPem(gcsConfig.qaEmail, gcsConfig.pathToQAPem)
// I am no one's pet. I am myself.
Sam.user.status()(saAuthToken).map(_.userInfo.userEmail) shouldBe Some(saEmail.value)
}
"should retrieve a user's proxy group as any user" in {
val Seq(user1: Credentials, user2: Credentials) = UserPool.chooseStudents(2)
val authToken1: AuthToken = user1.makeAuthToken()
val authToken2: AuthToken = user2.makeAuthToken()
      val info1 = Sam.user.status()(authToken1).get.userInfo
      val info2 = Sam.user.status()(authToken2).get.userInfo
      val email1 = WorkbenchEmail(info1.userEmail)
      val email2 = WorkbenchEmail(info2.userEmail)
      val userId1 = info1.userSubjectId
      val userId2 = info2.userSubjectId
val proxyGroup1_1 = Sam.user.proxyGroup(email1.value)(authToken1)
val proxyGroup1_2 = Sam.user.proxyGroup(email1.value)(authToken2)
val proxyGroup2_1 = Sam.user.proxyGroup(email2.value)(authToken1)
val proxyGroup2_2 = Sam.user.proxyGroup(email2.value)(authToken2)
val expectedProxyEmail1 = s"$userId1@${gcsConfig.appsDomain}"
proxyGroup1_1.value should endWith (expectedProxyEmail1)
proxyGroup1_2.value should endWith (expectedProxyEmail1)
val expectedProxyEmail2 = s"$userId2@${gcsConfig.appsDomain}"
proxyGroup2_1.value should endWith (expectedProxyEmail2)
proxyGroup2_2.value should endWith (expectedProxyEmail2)
}
"should retrieve a user's proxy group from a pet service account email as any user" in {
val Seq(user1: Credentials, user2: Credentials) = UserPool.chooseStudents(2)
val authToken1: AuthToken = user1.makeAuthToken()
val authToken2: AuthToken = user2.makeAuthToken()
withCleanBillingProject(UserPool.chooseProjectOwner, List(user1.email, user2.email)) { project =>
        val userId = Sam.user.status()(authToken1).get.userInfo.userSubjectId
val petSAEmail = Sam.user.petServiceAccountEmail(project)(authToken1)
val proxyGroup_1 = Sam.user.proxyGroup(petSAEmail.value)(authToken1)
val proxyGroup_2 = Sam.user.proxyGroup(petSAEmail.value)(authToken2)
val expectedProxyEmail = s"$userId@${gcsConfig.appsDomain}"
proxyGroup_1.value should endWith(expectedProxyEmail)
proxyGroup_2.value should endWith(expectedProxyEmail)
}
}
"should furnish a new service account key and cache it for further retrievals" in {
val user = UserPool.chooseStudent
withCleanBillingProject(UserPool.chooseProjectOwner, List(user.email)) { project =>
withCleanUp {
val key1 = Sam.user.petServiceAccountKey(project)(user.makeAuthToken())
val key2 = Sam.user.petServiceAccountKey(project)(user.makeAuthToken())
key1 shouldBe key2
register cleanUp Sam.user.deletePetServiceAccountKey(project, getFieldFromJson(key1, "private_key_id"))(user.makeAuthToken())
}
}
}
"should furnish a new service account key after deleting a cached key" in {
val user = UserPool.chooseStudent
withCleanBillingProject(UserPool.chooseProjectOwner, List(user.email)) { project =>
withCleanUp {
val key1 = Sam.user.petServiceAccountKey(project)(user.makeAuthToken())
Sam.user.deletePetServiceAccountKey(project, getFieldFromJson(key1, "private_key_id"))(user.makeAuthToken())
val key2 = Sam.user.petServiceAccountKey(project)(user.makeAuthToken())
register cleanUp Sam.user.deletePetServiceAccountKey(project, getFieldFromJson(key2, "private_key_id"))(user.makeAuthToken())
key1 shouldNot be(key2)
}
}
}
//this is ignored because there is a permission error with GPAlloc that needs to be looked into.
//in a GPAlloc'd project, the firecloud service account does not have permission to remove the pet SA
// @mbemis
"should re-create a pet SA in google even if it still exists in sam" ignore {
val user = UserPool.chooseStudent
//this must use a GPAlloc'd project to avoid deleting the pet for a shared project, which
//may have unexpected side effects
withCleanBillingProject(user) { projectName =>
withCleanUp {
val petSaKeyOriginal = Sam.user.petServiceAccountKey(projectName)(user.makeAuthToken())
val petSaEmailOriginal = getFieldFromJson(petSaKeyOriginal, "client_email")
val petSaKeyIdOriginal = getFieldFromJson(petSaKeyOriginal, "private_key_id")
val petSaName = petSaEmailOriginal.split('@').head
register cleanUp Sam.user.deletePetServiceAccountKey(projectName, petSaKeyIdOriginal)(user.makeAuthToken())
//act as a rogue process and delete the pet SA without telling sam
Await.result(googleIamDAO.removeServiceAccount(GoogleProject(projectName), ServiceAccountName(petSaName)), Duration.Inf)
val petSaKeyNew = Sam.user.petServiceAccountKey(projectName)(user.makeAuthToken())
val petSaEmailNew = getFieldFromJson(petSaKeyNew, "client_email")
val petSaKeyIdNew = getFieldFromJson(petSaKeyNew, "private_key_id")
register cleanUp Sam.user.deletePetServiceAccountKey(projectName, petSaKeyIdNew)(user.makeAuthToken())
petSaEmailOriginal should equal(petSaEmailNew) //sanity check to make sure the SA is the same
petSaKeyIdOriginal should not equal petSaKeyIdNew //make sure we were able to generate a new key and that a new one was returned
}
}
}
"should get an access token for a user's own pet service account" in {
val user = UserPool.chooseStudent
withCleanBillingProject(UserPool.chooseProjectOwner, List(user.email)) { project =>
// get my pet's email
val petEmail1 = Sam.user.petServiceAccountEmail(project)(user.makeAuthToken())
val scopes = Set("https://www.googleapis.com/auth/userinfo.email", "https://www.googleapis.com/auth/userinfo.profile")
// get my pet's token
val petToken = Sam.user.petServiceAccountToken(project, scopes)(user.makeAuthToken())
// convert string token to an AuthToken
val petAuthToken = new AuthToken {
override def buildCredential() = ???
override lazy val value = petToken
}
// get my pet's email using my pet's token
val petEmail2 = Sam.user.petServiceAccountEmail(project)(petAuthToken)
// result should be the same
petEmail2 shouldBe petEmail1
}
}
"should arbitrarily choose a project to return a pet key for when the user has existing pets" in {
val user = UserPool.chooseStudent
val userToken = user.makeAuthToken()
withCleanBillingProject(UserPool.chooseProjectOwner, List(user.email)) { project1 =>
withCleanBillingProject(UserPool.chooseProjectOwner, List(user.email)) { project2 =>
// get my pet's email thru the arbitrary key endpoint
val petEmailArbitrary = getFieldFromJson(Sam.user.arbitraryPetServiceAccountKey()(userToken), "client_email")
//get the user's subject id for comparison below
val userSubjectId = Sam.user.status()(userToken).get.userInfo.userSubjectId
// result should be a pet associated with the user
assert(petEmailArbitrary.contains(userSubjectId))
}
}
}
"should arbitrarily choose a project to return a pet key for when the user has no existing pets" in {
val user = UserPool.chooseStudent
val userToken = user.makeAuthToken()
val userSubjectId = Sam.user.status()(userToken).get.userInfo.userSubjectId
// get my pet's email thru the arbitrary key endpoint
val petEmailArbitrary = getFieldFromJson(Sam.user.arbitraryPetServiceAccountKey()(userToken), "client_email")
assert(petEmailArbitrary.contains(userSubjectId))
}
"should arbitrarily choose a project to return a pet token for when the user has existing pets" in {
val user = UserPool.chooseStudent
withCleanBillingProject(UserPool.chooseProjectOwner, List(user.email)) { project =>
// get my pet's email
val petEmail1 = Sam.user.petServiceAccountEmail(project)(user.makeAuthToken())
val scopes = Set("https://www.googleapis.com/auth/userinfo.email", "https://www.googleapis.com/auth/userinfo.profile")
// get my pet's token
val petToken = Sam.user.arbitraryPetServiceAccountToken(scopes)(user.makeAuthToken())
// convert string token to an AuthToken
val petAuthToken = new AuthToken {
override def buildCredential() = ???
override lazy val value = petToken
}
// get my pet's email using my pet's token
val petEmail2 = Sam.user.petServiceAccountEmail(project)(petAuthToken)
// result should be the same
petEmail2 shouldBe petEmail1
}
}
"should arbitrarily choose a project to return a pet token for when the user has no existing pets" in {
val user = UserPool.chooseStudent
val scopes = Set("https://www.googleapis.com/auth/userinfo.email", "https://www.googleapis.com/auth/userinfo.profile")
// get my pet's token
val petToken = Sam.user.arbitraryPetServiceAccountToken(scopes)(user.makeAuthToken())
// convert string token to an AuthToken
val petAuthToken = new AuthToken {
override def buildCredential() = ???
override lazy val value = petToken
}
// get my pet's email using my pet's token
val petEmail1 = getFieldFromJson(Sam.user.arbitraryPetServiceAccountKey()(user.makeAuthToken()), "client_email")
val petEmail2 = getFieldFromJson(Sam.user.arbitraryPetServiceAccountKey()(petAuthToken), "client_email")
// result should be the same
petEmail2 shouldBe petEmail1
}
"should synchronize groups with Google" taggedAs Tags.ExcludeInAlpha in {
val managedGroupId = UUID.randomUUID.toString
val adminPolicyName = "admin"
val Seq(user1: Credentials, user2: Credentials, user3: Credentials) = UserPool.chooseStudents(3)
val user1AuthToken = user1.makeAuthToken()
val Seq(user1Proxy: WorkbenchEmail, user2Proxy: WorkbenchEmail, user3Proxy: WorkbenchEmail) = Seq(user1, user2, user3).map(user => Sam.user.proxyGroup(user.email)(user1AuthToken))
Sam.user.createGroup(managedGroupId)(user1AuthToken)
register cleanUp Sam.user.deleteGroup(managedGroupId)(user1AuthToken)
val policies = Sam.user.listResourcePolicies("managed-group", managedGroupId)(user1AuthToken)
val policyEmail = policies.collect {
case SamModel.AccessPolicyResponseEntry(_, policy, email) if policy.memberEmails.nonEmpty => email
}
assert(policyEmail.size == 1) // Only the admin policy should be non-empty after creation
// The admin policy should contain only the user that created the group
awaitAssert(
Await.result(googleDirectoryDAO.listGroupMembers(policyEmail.head), 5.minutes)
.getOrElse(Set.empty) should contain theSameElementsAs Set(user1Proxy.value),
5.minutes, 5.seconds)
// Change the membership of the admin policy to include users 1 and 2
Sam.user.setPolicyMembers(managedGroupId, adminPolicyName, Set(user1.email, user2.email))(user1AuthToken)
awaitAssert(
Await.result(googleDirectoryDAO.listGroupMembers(policyEmail.head), 5.minutes)
.getOrElse(Set.empty) should contain theSameElementsAs Set(user1Proxy.value, user2Proxy.value),
5.minutes, 5.seconds)
// Add user 3 to the admin policy
Sam.user.addUserToPolicy(managedGroupId, adminPolicyName, user3.email)(user1AuthToken)
awaitAssert(
Await.result(googleDirectoryDAO.listGroupMembers(policyEmail.head), 5.minutes)
.getOrElse(Set.empty) should contain theSameElementsAs Set(user1Proxy.value, user2Proxy.value, user3Proxy.value),
5.minutes, 5.seconds)
// Remove user 2 from the admin policy
Sam.user.removeUserFromPolicy(managedGroupId, adminPolicyName, user2.email)(user1AuthToken)
awaitAssert(
Await.result(googleDirectoryDAO.listGroupMembers(policyEmail.head), 5.minutes)
.getOrElse(Set.empty) should contain theSameElementsAs Set(user1Proxy.value, user3Proxy.value),
5.minutes, 5.seconds)
}
"should only synchronize the intersection group for policies constrained by auth domains" taggedAs Tags.ExcludeInAlpha in {
val authDomainId = UUID.randomUUID.toString
val Seq(inPolicyUser: Credentials, inAuthDomainUser: Credentials, inBothUser: Credentials) = UserPool.chooseStudents(3)
val inBothUserAuthToken = inBothUser.makeAuthToken()
val Seq(inAuthDomainUserProxy: WorkbenchEmail, inBothUserProxy: WorkbenchEmail) = Seq(inAuthDomainUser, inBothUser).map {
user => Sam.user.proxyGroup(user.email)(inBothUserAuthToken)
}
// Create group that will act as auth domain
Sam.user.createGroup(authDomainId)(inBothUserAuthToken)
register cleanUp Sam.user.deleteGroup(authDomainId)(inBothUserAuthToken)
val authDomainPolicies = Sam.user.listResourcePolicies("managed-group", authDomainId)(inBothUserAuthToken)
val authDomainAdminEmail = for {
policy <- authDomainPolicies if policy.policy.memberEmails.nonEmpty
} yield {
policy.email
}
assert(authDomainAdminEmail.size == 1)
awaitAssert(
Await.result(googleDirectoryDAO.listGroupMembers(authDomainAdminEmail.head), 5.minutes)
.getOrElse(Set.empty) should contain theSameElementsAs Set(inBothUserProxy.value),
5.minutes, 5.seconds)
Sam.user.setPolicyMembers(authDomainId, "admin", Set(inAuthDomainUser.email, inBothUser.email))(inBothUserAuthToken)
awaitAssert(
Await.result(googleDirectoryDAO.listGroupMembers(authDomainAdminEmail.head), 5.minutes)
.getOrElse(Set.empty) should contain theSameElementsAs Set(inBothUserProxy.value, inAuthDomainUserProxy.value),
5.minutes, 5.seconds)
val resourceTypeName = "workspace"
val resourceId = UUID.randomUUID.toString
val ownerPolicyName = "owner"
val policies = Map(ownerPolicyName -> AccessPolicyMembership(Set(inBothUser.email), Set.empty, Set(ownerPolicyName)))
val resourceRequest = CreateResourceRequest(resourceId, policies, Set(authDomainId))
// Create constrained resource
Sam.user.createResource(resourceTypeName, resourceRequest)(inBothUserAuthToken)
register cleanUp Sam.user.deleteResource(resourceTypeName, resourceId)(inBothUserAuthToken)
Sam.user.addUserToResourcePolicy(resourceTypeName, resourceId, ownerPolicyName, inPolicyUser.email)(inBothUserAuthToken)
val resourcePolicies = Sam.user.listResourcePolicies(resourceTypeName, resourceId)(inBothUserAuthToken)
val resourceOwnerEmail = resourcePolicies.collect {
case SamModel.AccessPolicyResponseEntry(_, policy, email) if policy.memberEmails.nonEmpty => email
}
assert(resourceOwnerEmail.size == 1)
Sam.user.syncResourcePolicy(resourceTypeName, resourceId, ownerPolicyName)(inBothUserAuthToken)
// Google should only know about the user that is in both the auth domain group and the constrained policy
awaitAssert(
Await.result(googleDirectoryDAO.listGroupMembers(resourceOwnerEmail.head), 5.minutes)
.getOrElse(Set.empty) should contain theSameElementsAs Set(inBothUserProxy.value),
5.minutes, 5.seconds)
}
"should only synchronize all policy members for constrainable policies without auth domains" taggedAs Tags.ExcludeInAlpha in {
val Seq(policyUser: Credentials, policyUser1: Credentials, policyUser2: Credentials) = UserPool.chooseStudents(3)
val policyUser2Token = policyUser2.makeAuthToken()
val Seq(policyUser1Proxy: WorkbenchEmail, policyUser2Proxy: WorkbenchEmail) = Seq(policyUser1, policyUser2).map {
user => Sam.user.proxyGroup(user.email)(policyUser2Token)
}
val resourceTypeName = "workspace"
val resourceId = UUID.randomUUID.toString
val ownerPolicyName = "owner"
val policies = Map(ownerPolicyName -> AccessPolicyMembership(Set(policyUser2.email), Set.empty, Set(ownerPolicyName)))
val resourceRequest = CreateResourceRequest(resourceId, policies, Set.empty) //create constrainable resource but not actually constrained
// Create constrainable resource
Sam.user.createResource(resourceTypeName, resourceRequest)(policyUser2Token)
register cleanUp Sam.user.deleteResource(resourceTypeName, resourceId)(policyUser2Token)
Sam.user.addUserToResourcePolicy(resourceTypeName, resourceId, ownerPolicyName, policyUser1.email)(policyUser2Token)
val resourcePolicies = Sam.user.listResourcePolicies(resourceTypeName, resourceId)(policyUser2Token)
val resourceOwnerEmail = resourcePolicies.collect {
case SamModel.AccessPolicyResponseEntry(_, policy, email) if policy.memberEmails.nonEmpty => email
}
assert(resourceOwnerEmail.size == 1)
Sam.user.syncResourcePolicy(resourceTypeName, resourceId, ownerPolicyName)(policyUser2Token)
      // Since the policy is not constrained by an auth domain, Google should know about all policy members
awaitAssert(
Await.result(googleDirectoryDAO.listGroupMembers(resourceOwnerEmail.head), 5.minutes)
.getOrElse(Set.empty) should contain theSameElementsAs Set(policyUser1Proxy.value, policyUser2Proxy.value),
5.minutes, 5.seconds)
}
"should synchronize the all users group for public policies" taggedAs Tags.ExcludeInAlpha in {
val resourceId = UUID.randomUUID.toString
val user1 = UserPool.chooseStudent
val user1AuthToken = user1.makeAuthToken()
val user1Proxy = Sam.user.proxyGroup(user1.email)(user1AuthToken)
val allUsersGroupEmail = Sam.user.getGroupEmail("All_Users")(user1AuthToken)
val resourceTypeName = "managed-group"
val adminPolicyName = "admin"
val adminNotifierPolicyName = "admin-notifier"
Sam.user.createGroup(resourceId)(user1AuthToken)
register cleanUp Sam.user.deleteGroup(resourceId)(user1AuthToken)
val policies = Sam.user.listResourcePolicies(resourceTypeName, resourceId)(user1AuthToken)
val adminPolicy = policies.filter(_.policyName equals adminPolicyName).last
val adminNotifierPolicy = policies.filter(_.policyName equals adminNotifierPolicyName).last
awaitAssert(
Await.result(googleDirectoryDAO.listGroupMembers(adminPolicy.email), 5.minutes)
.getOrElse(Set.empty) should contain theSameElementsAs Set(user1Proxy.value),
5.minutes, 5.seconds)
Sam.user.syncResourcePolicy(resourceTypeName, resourceId, adminNotifierPolicyName)(user1AuthToken)
Sam.user.makeResourcePolicyPublic(resourceTypeName, resourceId, adminNotifierPolicyName, true)(user1AuthToken)
awaitAssert(
Await.result(googleDirectoryDAO.listGroupMembers(adminNotifierPolicy.email), 5.minutes)
.getOrElse(Set.empty) should contain theSameElementsAs Set(allUsersGroupEmail.value),
5.minutes, 5.seconds)
}
"should not allow pet creation in a project that belongs to an external org" in {
val userAuthToken = UserPool.chooseAnyUser.makeAuthToken()
val restException = intercept[RestException] {
Sam.user.petServiceAccountEmail(gcsConfig.serviceProject)(userAuthToken)
}
import spray.json._
restException.message.parseJson.asJsObject.fields("statusCode") shouldBe JsNumber(400)
}
}
private def getFieldFromJson(jsonKey: String, field: String): String = {
import spray.json._
jsonKey.parseJson.asJsObject.getFields(field).head.asInstanceOf[JsString].value
}
}
| broadinstitute/sam | automation/src/test/scala/org/broadinstitute/dsde/test/api/SamApiSpec.scala | Scala | bsd-3-clause | 26,374 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.carbondata.spark.testsuite.singlevaluerow
import org.apache.spark.sql.common.util.QueryTest
import org.scalatest.BeforeAndAfterAll
import org.apache.carbondata.core.constants.CarbonCommonConstants
import org.apache.carbondata.core.util.CarbonProperties
class TestEmptyRows extends QueryTest with BeforeAndAfterAll {
override def beforeAll {
sql("drop table if exists emptyRowCarbonTable")
sql("drop table if exists emptyRowHiveTable")
//eid,ename,sal,presal,comm,deptno,Desc
    sql(
      "create table if not exists emptyRowCarbonTable (eid int,ename String,sal decimal," +
      "presal decimal,comm decimal(37,37),deptno decimal(18,2),Desc String) " +
      "STORED BY 'org.apache.carbondata.format'"
    )
    sql(
      "create table if not exists emptyRowHiveTable(eid int,ename String,sal decimal," +
      "presal decimal,comm decimal(37,37),deptno decimal(18,2),Desc String) " +
      "row format delimited fields terminated by ','"
    )
CarbonProperties.getInstance()
.addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "yyyy/MM/dd")
val csvFilePath = s"$resourcesPath/emptyrow/emptyRows.csv"
sql(
s"""LOAD DATA INPATH '$csvFilePath' INTO table emptyRowCarbonTable OPTIONS('DELIMITER'=',','QUOTECHAR'='"','FILEHEADER'='eid,ename,sal,presal,comm,deptno,Desc')""")
sql(
"LOAD DATA LOCAL INPATH '" + csvFilePath + "' into table " +
"emptyRowHiveTable"
)
}
test("select eid from table") {
checkAnswer(
sql("select eid from emptyRowCarbonTable"),
sql("select eid from emptyRowHiveTable")
)
}
test("select Desc from emptyRowTable") {
checkAnswer(
sql("select Desc from emptyRowCarbonTable"),
sql("select Desc from emptyRowHiveTable")
)
}
override def afterAll {
sql("drop table emptyRowCarbonTable")
sql("drop table emptyRowHiveTable")
CarbonProperties.getInstance()
.addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "dd-MM-yyyy")
}
}
| JihongMA/incubator-carbondata | integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/emptyrow/TestEmptyRows.scala | Scala | apache-2.0 | 2,841 |
/*
* Wire
* Copyright (C) 2016 Wire Swiss GmbH
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package com.waz.threading
import java.util.concurrent.ConcurrentLinkedQueue
import java.util.concurrent.atomic.AtomicInteger
import android.os.{Handler, Looper}
import com.waz.log.BasicLogging.LogTag
import com.waz.log.LogSE._
import com.waz.utils.crypto.ZSecureRandom
import scala.annotation.tailrec
import scala.concurrent.ExecutionContext
trait DispatchQueue extends ExecutionContext {
private[threading] val name: String = "queue_" + ZSecureRandom.nextInt().toHexString
/**
* Executes a task on this queue.
* @param task - operation to perform on this queue.
*/
def apply[A](task: => A)(implicit tag: LogTag = LogTag("")): CancellableFuture[A] = CancellableFuture(task)(this, tag)
//TODO: this implements ExecutionContext.reportFailure, should we use different log here? or maybe do something else
override def reportFailure(t: Throwable): Unit = error(l"reportFailure called", t)(LogTag(name))
//used for waiting in tests
def hasRemainingTasks: Boolean = false
}
object DispatchQueue {
def apply(concurrentTasks: Int = 0, executor: ExecutionContext = Threading.ThreadPool) = concurrentTasks match {
case 0 => new UnlimitedDispatchQueue(executor)
case 1 => new SerialDispatchQueue(executor)
case _ => new LimitedDispatchQueue(concurrentTasks, executor)
}
}
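// Usage sketch (illustrative, not part of the library): DispatchQueue(1)
// yields a SerialDispatchQueue, so submitted tasks run one at a time in
// submission order; apply() wraps the task in a CancellableFuture.
object DispatchQueueUsageExample {
  def demo(): Unit = {
    val queue = DispatchQueue(concurrentTasks = 1)
    queue { println("runs on the serial queue") }
    ()
  }
}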
class UnlimitedDispatchQueue(executor: ExecutionContext = Threading.ThreadPool, override val name: String = "UnlimitedQueue") extends DispatchQueue {
override def execute(runnable: Runnable): Unit = executor.execute(DispatchQueueStats(name, runnable))
}
class UiDispatchQueue() extends DispatchQueue {
override private[threading] val name: String = "UiDispatchQueue"
private val handler = new Handler(Looper.getMainLooper)
override def execute(runnable: Runnable): Unit = handler.post(DispatchQueueStats(name, runnable))
}
/**
* Execution context limiting number of concurrently executing tasks.
* All tasks are executed on parent execution context.
*/
class LimitedDispatchQueue(concurrencyLimit: Int = 1, parent: ExecutionContext = Threading.ThreadPool, override val name: String = "LimitedQueue") extends DispatchQueue {
require(concurrencyLimit > 0, "concurrencyLimit should be greater than 0")
override def execute(runnable: Runnable): Unit = Executor.dispatch(runnable)
override def reportFailure(cause: Throwable): Unit = parent.reportFailure(cause)
private object Executor extends Runnable {
val queue = new ConcurrentLinkedQueue[Runnable]
val runningCount = new AtomicInteger(0)
def dispatch(runnable: Runnable): Unit = {
queue.add(DispatchQueueStats(name, runnable))
dispatchExecutor()
}
def dispatchExecutor(): Unit = {
if (runningCount.getAndIncrement < concurrencyLimit)
parent.execute(this)
else if (runningCount.decrementAndGet() < concurrencyLimit && !queue.isEmpty)
dispatchExecutor() // to prevent race condition when executor has just finished
}
override def run(): Unit = {
@tailrec
def executeBatch(counter: Int = 0): Unit = queue.poll() match {
case null => // done
case runnable =>
try {
runnable.run()
} catch {
case cause: Throwable => reportFailure(cause)
}
if (counter < LimitedDispatchQueue.MaxBatchSize) executeBatch(counter + 1)
}
executeBatch()
if (runningCount.decrementAndGet() < concurrencyLimit && !queue.isEmpty)
dispatchExecutor()
}
}
override def hasRemainingTasks = !Executor.queue.isEmpty || Executor.runningCount.get() > 0
}
object LimitedDispatchQueue {
/**
* Maximum number of tasks to execute in single batch.
* Used to prevent starving of other contexts using common parent.
*/
val MaxBatchSize = 100
}
class SerialDispatchQueue(executor: ExecutionContext = Threading.ThreadPool, override val name: String = "serial_" + ZSecureRandom.nextInt().toHexString) extends LimitedDispatchQueue(1, executor)
object SerialDispatchQueue {
def apply()(implicit logTag: LogTag): SerialDispatchQueue = new SerialDispatchQueue(name = s"${logTag}_SerialDispatchQueue")
} | wireapp/wire-android-sync-engine | zmessaging/src/main/scala/com/waz/threading/DispatchQueue.scala | Scala | gpl-3.0 | 4,855 |
import language.higherKinds

// `Tree` is defined in a sibling file of this exercise set; a minimal ADT
// matching the pattern matches below is assumed here so the file compiles
// on its own.
sealed trait Tree[+A]
case object Leaf extends Tree[Nothing]
case class Node[A](left: Tree[A], elem: A, right: Tree[A]) extends Tree[A]
trait Functor[F[_]] {
def map[A,B](functor: F[A])(g: A => B): F[B]
}
object Functor {
def map[F[_]:Functor,A,B](a: F[A])(g: A => B): F[B] =
implicitly[Functor[F]].map(a)(g)
implicit object FunctorOption extends Functor[Option] {
override def map[A,B](option: Option[A])(g: A => B): Option[B] =
option match {
case None => None
case Some(a) => Some(g(a))
}
}
implicit object FunctorList extends Functor[List] {
override def map[A,B](list: List[A])(g: A => B): List[B] =
list match {
case Nil => Nil
case h :: t => g(h) :: map(t)(g)
}
}
implicit object FunctorTree extends Functor[Tree] {
override def map[A,B](tree: Tree[A])(g: A => B): Tree[B] =
tree match {
case Leaf => Leaf
case Node(left, elem, right) => Node(map(left)(g), g(elem), map(right)(g))
}
}
}
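// Illustrative usage (not part of the original exercise; assumes the Tree ADT
// declared above): the single `Functor.map` entry point works uniformly
// across all three instances.
object FunctorUsage {
  val o: Option[Int] = Functor.map(Some(2): Option[Int])(_ + 1)          // Some(3)
  val l: List[Int] = Functor.map(List(1, 2, 3))(_ * 2)                   // List(2, 4, 6)
  val t: Tree[Int] = Functor.map(Node(Leaf, 1, Leaf): Tree[Int])(_ + 10) // Node(Leaf, 11, Leaf)
}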
| grzegorzbalcerek/scala-exercises | Travers/Functor.scala | Scala | bsd-2-clause | 902 |
package org.scalatra
import javax.servlet.http.HttpServletResponse
import test.scalatest.ScalatraFunSuite
class AfterTestServlet extends ScalatraServlet {
after() {
response.setStatus(204)
}
after("/some/path") {
response.setStatus(202)
}
after("/other/path") {
response.setStatus(206)
}
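  // Note: after-filters run in registration order, so on the matched paths the
  // path-specific status (202/206) overwrites the catch-all 204 set above.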
get("/some/path") { }
get("/other/path") { }
get("/third/path") { }
}
class AfterTest extends ScalatraFunSuite {
addServlet(classOf[AfterTestServlet], "/*")
test("afterAll is applied to all paths") {
get("/third/path") {
status should equal(204)
}
}
test("after only applies to a given path") {
get("/some/path") {
status should equal(202)
}
get("/other/path") {
status should equal(206)
}
}
}
| kuochaoyi/scalatra | core/src/test/scala/org/scalatra/AfterTest.scala | Scala | bsd-2-clause | 777 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.plan
import org.apache.calcite.rel.RelNode
import org.apache.flink.api.scala._
import org.apache.flink.table.api.Table
import org.apache.flink.table.api.scala._
import org.apache.flink.table.plan.nodes.datastream._
import org.apache.flink.table.utils.TableTestUtil._
import org.apache.flink.table.utils.{StreamTableTestUtil, TableTestBase}
import org.junit.Assert._
import org.junit.{Ignore, Test}
class RetractionRulesTest extends TableTestBase {
def streamTestForRetractionUtil(): StreamTableTestForRetractionUtil = {
new StreamTableTestForRetractionUtil()
}
@Test
def testSelect(): Unit = {
val util = streamTestForRetractionUtil()
val table = util.addTable[(String, Int)]('word, 'number)
val resultTable = table.select('word, 'number)
val expected = s"DataStreamScan(false, Acc)"
util.verifyTableTrait(resultTable, expected)
}
// one level unbounded groupBy
@Test
def testGroupBy(): Unit = {
val util = streamTestForRetractionUtil()
val table = util.addTable[(String, Int)]('word, 'number)
val defaultStatus = "false, Acc"
val resultTable = table
.groupBy('word)
.select('number.count)
val expected =
unaryNode(
"DataStreamCalc",
unaryNode(
"DataStreamGroupAggregate",
"DataStreamScan(true, Acc)",
s"$defaultStatus"
),
s"$defaultStatus"
)
util.verifyTableTrait(resultTable, expected)
}
// two level unbounded groupBy
@Test
def testTwoGroupBy(): Unit = {
val util = streamTestForRetractionUtil()
val table = util.addTable[(String, Int)]('word, 'number)
val defaultStatus = "false, Acc"
val resultTable = table
.groupBy('word)
.select('word, 'number.count as 'count)
.groupBy('count)
.select('count, 'count.count as 'frequency)
val expected =
unaryNode(
"DataStreamCalc",
unaryNode(
"DataStreamGroupAggregate",
unaryNode(
"DataStreamCalc",
unaryNode(
"DataStreamGroupAggregate",
"DataStreamScan(true, Acc)",
"true, AccRetract"
),
"true, AccRetract"
),
s"$defaultStatus"
),
s"$defaultStatus"
)
util.verifyTableTrait(resultTable, expected)
}
// group window
@Test
def testGroupWindow(): Unit = {
val util = streamTestForRetractionUtil()
val table = util.addTable[(String, Int)]('word, 'number, 'rowtime.rowtime)
val defaultStatus = "false, Acc"
val resultTable = table
.window(Tumble over 50.milli on 'rowtime as 'w)
.groupBy('w, 'word)
.select('word, 'number.count as 'count)
val expected =
unaryNode(
"DataStreamCalc",
unaryNode(
"DataStreamGroupWindowAggregate",
"DataStreamScan(true, Acc)",
s"$defaultStatus"
),
s"$defaultStatus"
)
util.verifyTableTrait(resultTable, expected)
}
// group window after unbounded groupBy
@Test
@Ignore // cannot pass rowtime through non-windowed aggregation
def testGroupWindowAfterGroupBy(): Unit = {
val util = streamTestForRetractionUtil()
val table = util.addTable[(String, Int)]('word, 'number, 'rowtime.rowtime)
val defaultStatus = "false, Acc"
val resultTable = table
.groupBy('word)
.select('word, 'number.count as 'count)
.window(Tumble over 50.milli on 'rowtime as 'w)
.groupBy('w, 'count)
.select('count, 'count.count as 'frequency)
val expected =
unaryNode(
"DataStreamCalc",
unaryNode(
"DataStreamGroupWindowAggregate",
unaryNode(
"DataStreamCalc",
unaryNode(
"DataStreamGroupAggregate",
"DataStreamScan(true, Acc)",
"true, AccRetract"
),
"true, AccRetract"
),
s"$defaultStatus"
),
s"$defaultStatus"
)
util.verifyTableTrait(resultTable, expected)
}
// over window
@Test
def testOverWindow(): Unit = {
val util = streamTestForRetractionUtil()
util.addTable[(String, Int)]("T1", 'word, 'number, 'proctime.proctime)
val defaultStatus = "false, Acc"
val sqlQuery =
"SELECT " +
"word, count(number) " +
"OVER (PARTITION BY word ORDER BY proctime " +
"ROWS BETWEEN UNBOUNDED preceding AND CURRENT ROW)" +
"FROM T1"
val expected =
unaryNode(
"DataStreamCalc",
unaryNode(
"DataStreamOverAggregate",
"DataStreamScan(true, Acc)",
s"$defaultStatus"
),
s"$defaultStatus"
)
util.verifySqlTrait(sqlQuery, expected)
}
// over window after unbounded groupBy
@Test
@Ignore // cannot pass rowtime through non-windowed aggregation
def testOverWindowAfterGroupBy(): Unit = {
val util = streamTestForRetractionUtil()
util.addTable[(String, Int)]("T1", 'word, 'number, 'proctime.proctime)
val defaultStatus = "false, Acc"
val sqlQuery =
"SELECT " +
"_count, count(word) " +
"OVER (PARTITION BY _count ORDER BY proctime " +
"ROWS BETWEEN UNBOUNDED preceding AND CURRENT ROW)" +
"FROM " +
"(SELECT word, count(number) as _count FROM T1 GROUP BY word) "
val expected =
unaryNode(
"DataStreamCalc",
unaryNode(
"DataStreamOverAggregate",
unaryNode(
"DataStreamCalc",
unaryNode(
"DataStreamGroupAggregate",
"DataStreamScan(true, Acc)",
"true, AccRetract"
),
"true, AccRetract"
),
s"$defaultStatus"
),
s"$defaultStatus"
)
util.verifySqlTrait(sqlQuery, expected)
}
// test binaryNode
@Test
def testBinaryNode(): Unit = {
val util = streamTestForRetractionUtil()
val lTable = util.addTable[(String, Int)]('word, 'number)
val rTable = util.addTable[(String, Long)]('word_r, 'count_r)
val defaultStatus = "false, Acc"
val resultTable = lTable
.groupBy('word)
.select('word, 'number.count as 'count)
.unionAll(rTable)
.groupBy('count)
.select('count, 'count.count as 'frequency)
val expected =
unaryNode(
"DataStreamCalc",
unaryNode(
"DataStreamGroupAggregate",
unaryNode(
"DataStreamCalc",
binaryNode(
"DataStreamUnion",
unaryNode(
"DataStreamCalc",
unaryNode(
"DataStreamGroupAggregate",
"DataStreamScan(true, Acc)",
"true, AccRetract"
),
"true, AccRetract"
),
"DataStreamScan(true, Acc)",
"true, AccRetract"
),
"true, AccRetract"
),
s"$defaultStatus"
),
s"$defaultStatus"
)
util.verifyTableTrait(resultTable, expected)
}
}
class StreamTableTestForRetractionUtil extends StreamTableTestUtil {
def verifySqlTrait(query: String, expected: String): Unit = {
verifyTableTrait(tableEnv.sql(query), expected)
}
def verifyTableTrait(resultTable: Table, expected: String): Unit = {
val relNode = resultTable.getRelNode
val optimized = tableEnv.optimize(relNode, updatesAsRetraction = false)
val actual = TraitUtil.toString(optimized)
assertEquals(
expected.split("\\n").map(_.trim).mkString("\\n"),
actual.split("\\n").map(_.trim).mkString("\\n"))
}
}
object TraitUtil {
def toString(rel: RelNode): String = {
val className = rel.getClass.getSimpleName
var childString: String = ""
var i = 0
while (i < rel.getInputs.size()) {
childString += TraitUtil.toString(rel.getInput(i))
i += 1
}
val retractString = rel.getTraitSet.getTrait(UpdateAsRetractionTraitDef.INSTANCE).toString
    val accModeString = rel.getTraitSet.getTrait(AccModeTraitDef.INSTANCE).toString
    s"""$className($retractString, $accModeString)
|$childString
|""".stripMargin.stripLineEnd
}
}
| zohar-mizrahi/flink | flink-libraries/flink-table/src/test/scala/org/apache/flink/table/plan/RetractionRulesTest.scala | Scala | apache-2.0 | 9,070 |
package io.youi.paint
import io.youi.Updates
import io.youi.drawable.{Context, Drawable}
import io.youi.util.CanvasPool
import org.scalajs.dom.CanvasPattern
class DrawablePaint[D <: Drawable](drawable: D,
repetition: Repetition,
width: => Double,
height: => Double,
x: => Double,
y: => Double,
ratio: => Double) extends PatternPaint {
modified := drawable.modified()
override def update(delta: Double): Unit = {
super.update(delta: Double)
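    // delegate to PatternPaint's bookkeeping, then tick the drawable if it is updatable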
drawable match {
case u: Updates => u.update(delta)
case _ => // Not updatable
}
}
override def createPattern(): CanvasPattern = {
val r = ratio
CanvasPool.withCanvas(width * r, height * r) { canvas =>
val context = new Context(canvas, r)
drawable.draw(context, x, y)
context.createPattern(repetition)
}
}
} | outr/youi | gui/src/main/scala/io/youi/paint/DrawablePaint.scala | Scala | mit | 1,026 |
/*
* Copyright 2001-2008 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest
/**
 * Exception thrown to indicate a test has been canceled.
 *
 * <p>
 * A <em>canceled test</em> is one that is unable to run because a required dependency,
 * such as an external database or web service, is unavailable. Canceling differs from
 * failing: a failure indicates that the system under test misbehaved, whereas a
 * cancellation indicates that a precondition for running the test could not be met,
 * so the test can determine neither a pass nor a fail.
 * </p>
 *
 * <p>
 * To support this, a test can include some code that
 * sends more information about the cancellation to the reporter when the test runs. At the point the
 * missing dependency is detected, it can call method <code>cancel</code>, which will cause the test to complete abruptly with <code>TestCanceledException</code>.
 * Because tests in ScalaTest can be designated as canceled with <code>TestCanceledException</code>, both the test name and any information
 * sent to the reporter when running the test can appear in the report of a test run. (In other words,
 * the code of a canceled test is executed just like any other test.) However, because the test completes abruptly
 * with <code>TestCanceledException</code>, the test will be reported as canceled, to indicate
 * there are missing dependencies and the test cannot determine either a pass or fail.
 * </p>
 *
 * @author Travis Stevens
 */
class TestCanceledException(
messageFun: StackDepthException => Option[String],
cause: Option[Throwable],
failedCodeStackDepthFun: StackDepthException => Int
) extends StackDepthException(messageFun, cause, failedCodeStackDepthFun)
with ModifiableMessage[TestCanceledException] {
/**
* Constructs a <code>TestCanceledException</code> with pre-determined <code>message</code> and <code>failedCodeStackDepth</code>. (This was
* the primary constructor form prior to ScalaTest 1.5.)
*
* @param message an optional detail message for this <code>StackDepthException</code>.
* @param cause an optional cause, the <code>Throwable</code> that caused this <code>StackDepthException</code> to be thrown.
* @param failedCodeStackDepth the depth in the stack trace of this exception at which the line of test code that failed resides.
*
 * @throws NullPointerException if either <code>message</code> or <code>cause</code> is <code>null</code>, or <code>Some(null)</code>.
*/
def this(message: Option[String], cause: Option[Throwable], failedCodeStackDepth: Int) =
this(
StackDepthException.toExceptionFunction(message),
cause,
e => failedCodeStackDepth
)
/**
* Create a <code>TestCanceledException</code> with specified stack depth and no detail message or cause.
*
* @param failedCodeStackDepth the depth in the stack trace of this exception at which the line of test code that failed resides.
*
*/
def this(failedCodeStackDepth: Int) = this(None, None, failedCodeStackDepth)
/**
* Create a <code>TestCanceledException</code> with a specified stack depth and detail message.
*
* @param message A detail message for this <code>TestCanceledException</code>.
* @param failedCodeStackDepth the depth in the stack trace of this exception at which the line of test code that failed resides.
*
* @throws NullPointerException if <code>message</code> is <code>null</code>.
*/
def this(message: String, failedCodeStackDepth: Int) =
this(
{ Option(message) },
None,
failedCodeStackDepth
)
/**
* Create a <code>TestCanceledException</code> with the specified stack depth and cause. The
* <code>message</code> field of this exception object will be initialized to
* <code>if (cause.getMessage == null) "" else cause.getMessage</code>.
*
* @param cause the cause, the <code>Throwable</code> that caused this <code>TestCanceledException</code> to be thrown.
* @param failedCodeStackDepth the depth in the stack trace of this exception at which the line of test code that failed resides.
*
* @throws NullPointerException if <code>cause</code> is <code>null</code>.
*/
def this(cause: Throwable, failedCodeStackDepth: Int) =
this(
{
if (cause == null) throw new NullPointerException("cause was null")
Some(if (cause.getMessage == null) "" else cause.getMessage)
},
Some(cause),
failedCodeStackDepth
)
/**
 * Create a <code>TestCanceledException</code> with the specified stack depth, detail
* message, and cause.
*
* <p>Note that the detail message associated with cause is
* <em>not</em> automatically incorporated in this throwable's detail
* message.
*
* @param message A detail message for this <code>TestCanceledException</code>.
* @param cause the cause, the <code>Throwable</code> that caused this <code>TestCanceledException</code> to be thrown.
* @param failedCodeStackDepth the depth in the stack trace of this exception at which the line of test code that failed resides.
*
* @throws NullPointerException if either <code>message</code> or <code>cause</code> is <code>null</code>.
*/
def this(message: String, cause: Throwable, failedCodeStackDepth: Int) =
this(
{
if (message == null) throw new NullPointerException("message was null")
Some(message)
},
{
if (cause == null) throw new NullPointerException("cause was null")
Some(cause)
},
failedCodeStackDepth
)
/**
* Returns an exception of class <code>TestCanceledException</code> with <code>failedExceptionStackDepth</code> set to 0 and
* all frames above this stack depth severed off. This can be useful when working with tools (such as IDEs) that do not
* directly support ScalaTest. (Tools that directly support ScalaTest can use the stack depth information delivered
* in the StackDepth exceptions.)
*/
def severedAtStackDepth: TestCanceledException = {
val truncated = getStackTrace.drop(failedCodeStackDepth)
val e = new TestCanceledException(message, cause, 0)
e.setStackTrace(truncated)
e
}
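  // Illustrative (hypothetical) sketch of how a runner might treat this
  // exception - report the test as canceled rather than failed:
  //
  //   try runTest()
  //   catch {
  //     case tce: TestCanceledException =>
  //       report("CANCELED: " + tce.message.getOrElse(""))
  //   }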
/**
* Returns an instance of this exception's class, identical to this exception,
* except with the detail message option string replaced with the result of passing
* the current detail message to the passed function, <code>fun</code>.
*
* @param fun A function that, given the current optional detail message, will produce
* the modified optional detail message for the result instance of <code>TestCanceledException</code>.
*/
def modifyMessage(fun: Option[String] => Option[String]): TestCanceledException = {
val mod = new TestCanceledException(fun(message), cause, failedCodeStackDepth)
mod.setStackTrace(getStackTrace)
mod
}
/**
* Indicates whether this object can be equal to the passed object.
*/
override def canEqual(other: Any): Boolean = other.isInstanceOf[TestCanceledException]
/**
* Indicates whether this object is equal to the passed object. If the passed object is
* a <code>TestCanceledException</code>, equality requires equal <code>message</code>,
* <code>cause</code>, and <code>failedCodeStackDepth</code> fields, as well as equal
* return values of <code>getStackTrace</code>.
*/
override def equals(other: Any): Boolean =
other match {
case that: TestCanceledException => super.equals(that)
case _ => false
}
/**
* Returns a hash code value for this object.
*/
// Don't need to change it. Implementing it only so as to not freak out people who know
// that if you override equals you must override hashCode.
override def hashCode: Int = super.hashCode
} | yyuu/scalatest | src/main/scala/org/scalatest/TestCanceledException.scala | Scala | apache-2.0 | 8,468 |
/*
* Copyright 2017 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct
import uk.gov.hmrc.ct.box.{CtBoolean, CtBoxIdentifier, Input}
case class HMRCFiling(value: Boolean) extends CtBoxIdentifier("HMRC filing") with CtBoolean with Input
| pncampbell/ct-calculations | src/main/scala/uk/gov/hmrc/ct/HMRCFiling.scala | Scala | apache-2.0 | 795 |
object ImpureLazy2 {
def f(): Int = {
var counter = 0
lazy val x = {
counter = counter + 1
counter
}
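    // Left-to-right evaluation: `counter` reads 0, the first `x` forces the
    // lazy block (counter becomes 1, x caches 1), the second `x` reuses the
    // cached value - hence 0 + 1 + 1 == 2.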
counter + x + x
} ensuring (_ == 2)
}
| epfl-lara/stainless | frontends/benchmarks/extraction/invalid/ImpureLazy2.scala | Scala | apache-2.0 | 171 |
package controllers.school
import controllers.helpers.CRUDController
import models.school
class MicrodegreeRevisionProposal extends CRUDController[school.MicrodegreeRevisionProposals, school.MicrodegreeRevisionProposal] {
/**
* @inheritdoc
*/
def resourceCollection =
school.MicrodegreeRevisionProposals
}
| Xanho/xanho-api | app/controllers/school/MicrodegreeRevisionProposal.scala | Scala | apache-2.0 | 326 |
package org.vaadin.addons.rinne
import com.vaadin.ui.BrowserFrame
import org.vaadin.addons.rinne.mixins.AbstractEmbeddedMixin
class VBrowserFrame extends BrowserFrame with AbstractEmbeddedMixin
| LukaszByczynski/rinne | src/main/scala/org/vaadin/addons/rinne/VBrowserFrame.scala | Scala | apache-2.0 | 238 |
package eu.semberal.parsercombinatorsexample
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import org.scalatest.FunSuite
import eu.semberal.parsercombinatorsexample.IPParser
@RunWith(classOf[JUnitRunner])
class IPTest extends FunSuite {
test("Test if IP parsing using regexps and parser combinator produces the same result") {
val parser = new IPParser
val ips = List("1.1.1.1", "1.2.3.256", "17.jko.32.", "255.255.255.255", "....", "13.14.35.179", "", " ", ".", "112 .32.15.17", "112.32.14.17", "112.32.14.1722", "112.32.14.172 ")
ips.foreach {
x => expect(parser.parseUsingRegexp(x))(parser.parseUsingParserCombinator(x))
}
}
}
| semberal/blog-examples | parser-combinators-and-what-if-regular-expressions-are-not-enough/src/test/scala/eu/semberal/parsercombinatorsexample/IPTest.scala | Scala | apache-2.0 | 690 |
package chat.tox.antox.utils
import java.io._
import android.content.Context
import android.graphics.Bitmap
import chat.tox.antox.utils.StorageType._
object FileUtils {
val imageExtensions = List("jpg", "jpeg", "gif", "png")
/**
* Gets the directory designated by 'path' from the appropriate place based on 'storageType'
*/
def getDirectory(path: String, storageType: StorageType, context: Context): File = {
if (storageType == StorageType.EXTERNAL) {
new File(path)
} else {
new File(context.getFilesDir, path)
}
}
def copy(source: File, destination: File): Unit = {
val inStream = new FileInputStream(source)
copy(inStream, destination)
}
def copy(inStream: FileInputStream, destination: File): Unit = {
val outStream = new FileOutputStream(destination)
val inChannel = inStream.getChannel
val outChannel = outStream.getChannel
inChannel.transferTo(0, inChannel.size(), outChannel)
inStream.close()
outStream.close()
}
def readToBytes(source: File): Option[Array[Byte]] = {
val f = new RandomAccessFile(source, "r")
try {
if (f.length() > Integer.MAX_VALUE) return None
val data = new Array[Byte](f.length().asInstanceOf[Int])
f.readFully(data)
Some(data)
} finally {
f.close()
}
}
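  // Usage sketch (paths are illustrative, and StorageType.INTERNAL is an
  // assumed variant name for the non-external case):
  //
  //   val dir = FileUtils.getDirectory("transfers", StorageType.INTERNAL, context)
  //   FileUtils.copy(new File("/sdcard/incoming.png"), new File(dir, "incoming.png"))
  //   val bytes: Option[Array[Byte]] = FileUtils.readToBytes(new File(dir, "incoming.png"))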
def writeBitmap(bitmap: Bitmap, format: Bitmap.CompressFormat, quality: Int, destination: File): Unit = {
val outStream = new FileOutputStream(destination)
bitmap.compress(format, quality, outStream)
outStream.flush()
outStream.close()
}
def writePrivateFile(fileName: String, write: String, context: Context): Unit = {
try {
val outputStream = context.openFileOutput(fileName, Context.MODE_PRIVATE)
outputStream.write(write.getBytes)
outputStream.close()
} catch {
case e: Exception =>
e.printStackTrace()
}
}
} | wiiam/Antox | app/src/main/scala/chat/tox/antox/utils/FileUtils.scala | Scala | gpl-3.0 | 1,905 |
object lib_gen {
import lib_stream.Stream
import lib_state._
import lib_par._
import lib_par.Par.Par
import Gen._
import Prop._
import java.util.concurrent.{Executors,ExecutorService}
import language.postfixOps
import language.implicitConversions
case class Prop(run: (MaxSize,TestCases,RNG) => Result) {
def &&(p: Prop) = Prop {
(max,n,rng) => run(max,n,rng) match {
case Passed | Proved => p.run(max, n, rng)
case x => x
}
}
def ||(p: Prop) = Prop {
(max,n,rng) => run(max,n,rng) match {
// In case of failure, run the other prop.
case Falsified(msg, _) => p.tag(msg).run(max,n,rng)
case x => x
}
}
/* This is rather simplistic - in the event of failure, we simply prepend
* the given message on a newline in front of the existing message.
*/
def tag(msg: String) = Prop {
(max,n,rng) => run(max,n,rng) match {
      case Falsified(e, c) => Falsified(msg + "\n" + e, c)
case x => x
}
}
}
object Prop {
type SuccessCount = Int
type TestCases = Int
type MaxSize = Int
type FailedCase = String
sealed trait Result {
def isFalsified: Boolean
}
case object Passed extends Result {
def isFalsified = false
}
case class Falsified(failure: FailedCase,
successes: SuccessCount) extends Result {
def isFalsified = true
}
case object Proved extends Result {
def isFalsified = false
}
/* Produce an infinite random stream from a `Gen` and a starting `RNG`. */
def randomStream[A](g: Gen[A])(rng: RNG): Stream[A] =
Stream.unfold(rng)(rng => Some(g.sample.run(rng)))
def forAll[A](as: Gen[A])(f: A => Boolean): Prop = Prop {
(n,rng) => randomStream(as)(rng).zip(Stream.from(0)).take(n).map {
case (a, i) => try {
if (f(a)) Passed else Falsified(a.toString, i)
} catch { case e: Exception => Falsified(buildMsg(a, e), i) }
}.find(_.isFalsified).getOrElse(Passed)
}
// String interpolation syntax. A string starting with `s"` can refer to
// a Scala value `v` as `$v` or `${v}` in the string.
// This will be expanded to `v.toString` by the Scala compiler.
def buildMsg[A](s: A, e: Exception): String =
s"test case: $s\\n" +
s"generated an exception: ${e.getMessage}\\n" +
s"stack trace:\\n ${e.getStackTrace.mkString("\\n")}"
def apply(f: (TestCases,RNG) => Result): Prop =
Prop { (_,n,rng) => f(n,rng) }
def forAll[A](g: SGen[A])(f: A => Boolean): Prop =
forAll(g(_))(f)
def forAll[A](g: Int => Gen[A])(f: A => Boolean): Prop = Prop {
(max,n,rng) =>
val casesPerSize = (n - 1) / max + 1
val props: Stream[Prop] =
Stream.from(0).take((n min max) + 1).map(i => forAll(g(i))(f))
val prop: Prop =
props.map(p => Prop { (max, n, rng) =>
p.run(max, casesPerSize, rng)
}).toList.reduce(_ && _)
prop.run(max,n,rng)
}
def run(p: Prop,
maxSize: Int = 100,
testCases: Int = 100,
rng: RNG = RNG.Simple(System.currentTimeMillis)): Unit =
p.run(maxSize, testCases, rng) match {
case Falsified(msg, n) =>
println(s"! Falsified after $n passed tests:\\n $msg")
case Passed =>
println(s"+ OK, passed $testCases tests.")
case Proved =>
println(s"+ OK, proved property.")
}
val ES: ExecutorService = Executors.newCachedThreadPool
val p1 = Prop.forAll(Gen.unit(Par.unit(1)))(i =>
Par.map(i)(_ + 1)(ES).get == Par.unit(2)(ES).get)
def check(p: => Boolean): Prop = Prop { (_, _, _) =>
if (p) Passed else Falsified("()", 0)
}
val p2 = check {
val p = Par.map(Par.unit(1))(_ + 1)
val p2 = Par.unit(2)
p(ES).get == p2(ES).get
}
def equal[A](p: Par[A], p2: Par[A]): Par[Boolean] =
Par.map2(p,p2)(_ == _)
val p3 = check {
equal (
Par.map(Par.unit(1))(_ + 1),
Par.unit(2)
) (ES) get
}
val S = weighted(
choose(1,4).map(Executors.newFixedThreadPool) -> .75,
unit(Executors.newCachedThreadPool) -> .25) // `a -> b` is syntax sugar for `(a,b)`
def forAllPar[A](g: Gen[A])(f: A => Par[Boolean]): Prop =
forAll(S.map2(g)((_,_))) { case (s,a) => f(a)(s).get }
def checkPar(p: Par[Boolean]): Prop =
forAllPar(Gen.unit(()))(_ => p)
def forAllPar2[A](g: Gen[A])(f: A => Par[Boolean]): Prop =
forAll(S ** g) { case (s,a) => f(a)(s).get }
def forAllPar3[A](g: Gen[A])(f: A => Par[Boolean]): Prop =
forAll(S ** g) { case s ** a => f(a)(s).get }
val pint = Gen.choose(0,10) map (Par.unit(_))
val p4 =
forAllPar(pint)(n => equal(Par.map(n)(y => y), n))
val forkProp = Prop.forAllPar(pint2)(i => equal(Par.fork(i), i)) tag "fork"
}
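  // Usage sketch (illustrative, in the style of the examples above): a
  // reverse-twice-is-identity property over generated lists of small ints.
  object PropUsageExample {
    val revProp = Prop.forAll(Gen.listOf(Gen.choose(0, 100))) { ns =>
      ns.reverse.reverse == ns
    }
    def demo(): Unit = Prop.run(revProp) // prints "+ OK, passed 100 tests."
  }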
case class Gen[+A](sample: State[RNG,A]) {
def map[B](f: A => B): Gen[B] =
Gen(sample.map(f))
def map2[B,C](g: Gen[B])(f: (A,B) => C): Gen[C] =
Gen(sample.map2(g.sample)(f))
def flatMap[B](f: A => Gen[B]): Gen[B] =
Gen(sample.flatMap(a => f(a).sample))
/* A method alias for the function we wrote earlier. */
def listOfN(size: Int): Gen[List[A]] =
Gen.listOfN(size, this)
/* A version of `listOfN` that generates the size to use dynamically. */
def listOfN(size: Gen[Int]): Gen[List[A]] =
size flatMap (n => this.listOfN(n))
def listOf: SGen[List[A]] = Gen.listOf(this)
def listOf1: SGen[List[A]] = Gen.listOf1(this)
def unsized = SGen(_ => this)
def **[B](g: Gen[B]): Gen[(A,B)] =
(this map2 g)((_,_))
}
object Gen {
def unit[A](a: => A): Gen[A] =
Gen(State.unit(a))
val boolean: Gen[Boolean] =
Gen(State(RNG.boolean))
def choose(start: Int, stopExclusive: Int): Gen[Int] =
Gen(State(RNG.nonNegativeInt).map(n => start + n % (stopExclusive-start)))
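  // e.g. choose(0, 10) yields ints in [0, 10), mapped from nonNegativeInt via
  // modulo (slightly biased when the range does not divide Int.MaxValue evenly).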
def listOfN[A](n: Int, g: Gen[A]): Gen[List[A]] =
Gen(State.sequence(List.fill(n)(g.sample)))
val uniform: Gen[Double] = Gen(State(RNG.double))
def choose(i: Double, j: Double): Gen[Double] =
Gen(State(RNG.double).map(d => i + d*(j-i)))
/* Basic idea is to add 1 to the result of `choose` if it is of the wrong
* parity, but we require some special handling to deal with the maximum
* integer in the range.
*/
def even(start: Int, stopExclusive: Int): Gen[Int] =
choose(start, if (stopExclusive%2 == 0) stopExclusive - 1 else stopExclusive).
map (n => if (n%2 != 0) n+1 else n)
def odd(start: Int, stopExclusive: Int): Gen[Int] =
choose(start, if (stopExclusive%2 != 0) stopExclusive - 1 else stopExclusive).
map (n => if (n%2 == 0) n+1 else n)
def sameParity(from: Int, to: Int): Gen[(Int,Int)] = for {
i <- choose(from,to)
j <- if (i%2 == 0) even(from,to) else odd(from,to)
} yield (i,j)
def listOfN_1[A](n: Int, g: Gen[A]): Gen[List[A]] =
List.fill(n)(g).foldRight(unit(List[A]()))((a,b) => a.map2(b)(_ :: _))
def union[A](g1: Gen[A], g2: Gen[A]): Gen[A] =
boolean.flatMap(b => if (b) g1 else g2)
def weighted[A](g1: (Gen[A],Double), g2: (Gen[A],Double)): Gen[A] = {
/* The probability we should pull from `g1`. */
val g1Threshold = g1._2.abs / (g1._2.abs + g2._2.abs)
Gen(State(RNG.double).flatMap(d => if (d < g1Threshold) g1._1.sample else g2._1.sample))
}
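  /* Illustrative usage (a sketch, not part of the original source): the weights
   * are normalized into a threshold in [0, 1) and compared against a uniform
   * double, so weights 0.75 and 0.25 draw from the first generator about three
   * times out of four:
   *
   *   val biased: Gen[Int] = weighted(choose(0, 10) -> 0.75, unit(42) -> 0.25)
   */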
def listOf[A](g: Gen[A]): SGen[List[A]] =
SGen(n => g.listOfN(n))
/* Not the most efficient implementation, but it's simple.
* This generates ASCII strings.
*/
def stringN(n: Int): Gen[String] =
listOfN(n, choose(0,127)).map(_.map(_.toChar).mkString)
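  /* Sketch of intended use (not in the original source; assumes fpinscala's
   * State#run returning an (A, RNG) pair):
   *
   *   val s: String = stringN(5).sample.run(RNG.Simple(42))._1
   *   // a five-character string of code points in [0, 127)
   */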
val string: SGen[String] = SGen(stringN)
implicit def unsized[A](g: Gen[A]): SGen[A] = SGen(_ => g)
val smallInt = Gen.choose(-10,10)
val maxProp = forAll(listOf(smallInt)) { l =>
val max = l.max
!l.exists(_ > max) // No value greater than `max` should exist in `l`
}
def listOf1[A](g: Gen[A]): SGen[List[A]] =
SGen(n => g.listOfN(n max 1))
val maxProp1 = forAll(listOf1(smallInt)) { l =>
val max = l.max
!l.exists(_ > max) // No value greater than `max` should exist in `l`
}
// We specify that every sorted list is either empty, has one element,
// or has no two consecutive elements `(a,b)` such that `a` is greater than `b`.
val sortedProp = forAll(listOf(smallInt)) { l =>
val ls = l.sorted
l.isEmpty || ls.tail.isEmpty || !ls.zip(ls.tail).exists { case (a,b) => a > b }
}
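  /* Illustrative check (a sketch; assumes the `run` helper defined in the Prop
   * companion above):
   *
   *   Prop.run(sortedProp)  // prints "+ OK, passed 100 tests." when the law holds
   */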
object ** {
def unapply[A,B](p: (A,B)) = Some(p)
}
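  /* The ** extractor mirrors the ** combinator on Gen, letting pair results be
   * matched infix, as used by forAllPar3 above (illustrative):
   *
   *   forAll(S ** g) { case s ** a => f(a)(s).get }  // (s, a) via **.unapply
   */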
  /* A `Gen[Par[Int]]` built from a list summation that spawns a new parallel
   * computation for each element of the input list, summing them to produce
   * the final result. This is not the most compelling example, but it provides
   * at least some variation in structure to use for testing.
*
* Note that this has to be a `lazy val` because of the way Scala initializes objects.
* It depends on the `Prop` companion object being created, which references `pint2`.
*/
lazy val pint2: Gen[Par[Int]] = choose(-100,100).listOfN(choose(0,20)).map(l =>
l.foldLeft(Par.unit(0))((p,i) =>
Par.fork { Par.map2(p, Par.unit(i))(_ + _) }))
def genStringIntFn(g: Gen[Int]): Gen[String => Int] =
g map (i => (s => i))
}
case class SGen[+A](g: Int => Gen[A]) {
def apply(n: Int): Gen[A] = g(n)
def map[B](f: A => B): SGen[B] =
SGen { g(_) map f }
def flatMap[B](f: A => SGen[B]): SGen[B] = {
val g2: Int => Gen[B] = n => {
g(n) flatMap { f(_).g(n) }
}
SGen(g2)
}
def **[B](s2: SGen[B]): SGen[(A,B)] =
SGen(n => apply(n) ** s2(n))
}
}
| rucka/fpinscala | src/main/scala/fpinscala/lib/Gen.scala | Scala | gpl-2.0 | 9,892 |
package controllers.helpers
import java.util.UUID
import _root_.play.api.http.ContentTypes
import _root_.play.api.libs.json.{JsValue, Json}
import _root_.play.api.mvc.{Action, Controller}
import models.Helpers.Columns
import slick.driver.MySQLDriver.api._
import system.helpers.PropertyValidators.PropertyErrorCodes
import system.helpers._
trait CRUDController[T <: Table[R] with Columns.Id[R], R <: Resource] extends Controller with Secured {
def resourceCollection: ResourceCollection[T, R]
/**
* An [[Action]] to create a new resource in the [[resourceCollection]]
* @return An [[Action]]
*/
def create =
Authorized(None, Set(resourceCollection.canCreate))(parse.json)(request =>
resourceCollection.validateArguments(request.data.as[Map[String, JsValue]]) match {
case m if m.isEmpty =>
resourceCollection.create(request.data.as[Map[String, JsValue]])
.fold(InternalServerError(ResponseHelpers.message("Something broke.")))(item =>
Ok(Json.toJson(item)(resourceCollection.writes))
)
case m =>
BadRequest(ResponseHelpers.invalidFields(m))
}
)
/**
* An [[Action]] to read the resource with the provided ID in [[resourceCollection]]
* @param uuid @see [[system.helpers.Resource.id]]
* @return An [[Action]]
*/
def read(uuid: String) =
Authorized(Some(UUID.fromString(uuid)), Set(resourceCollection.canRead))(request =>
resourceCollection.read(UUID.fromString(uuid))
.fold(NotFound(ResponseHelpers.message("The resource with ID %s could not be found." format uuid)))(item =>
Ok(Json.toJson(item)(resourceCollection.writes))
)
)
/**
* An [[Action]] to update the resource with the provided ID in [[resourceCollection]]
* @param uuid @see [[system.helpers.Resource.id]]
* @return An [[Action]]
*/
def update(uuid: String) =
Authorized(Some(UUID.fromString(uuid)), Set(resourceCollection.canModify))(parse.json)(request =>
resourceCollection.validateArguments(request.data.as[Map[String, JsValue]]) filterNot (_._2 == PropertyErrorCodes.NO_VALUE) match {
case m if m.isEmpty =>
if (resourceCollection.update(UUID.fromString(uuid), request.data.as[Map[String, JsValue]]))
Accepted
else
InternalServerError(ResponseHelpers.message("Something broke."))
case m =>
BadRequest(ResponseHelpers.invalidFields(m))
}
)
/**
* An [[Action]] to delete the resource with the provided ID in [[resourceCollection]]
* @param uuid @see [[system.helpers.Resource.id]]
* @return An [[Action]]
*/
def delete(uuid: String) =
Authorized(Some(UUID.fromString(uuid)), Set(resourceCollection.canDelete))(request =>
if (resourceCollection.delete(UUID.fromString(uuid)))
NoContent
else
NotFound(ResponseHelpers.message("The resource with ID %s could not be found." format uuid))
)
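  /* Sketch of a concrete controller (illustrative only; `Widgets`, `Widget`
   * and `widgetCollection` are hypothetical):
   *
   *   class WidgetController extends CRUDController[Widgets, Widget] {
   *     def resourceCollection = widgetCollection
   *   }
   *
   * Routes would then map POST/GET/PUT/DELETE onto create/read/update/delete.
   */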
} | Xanho/xanho-api | app/controllers/helpers/CRUDController.scala | Scala | apache-2.0 | 2,999 |
package com.itszuvalex.itszulib.implicits
import com.itszuvalex.itszulib.util.StringUtils
import net.minecraft.item.ItemStack
import net.minecraft.nbt.NBTTagCompound
import net.minecraftforge.oredict.OreDictionary
import scala.collection.JavaConversions._
/**
* Created by Christopher Harris (Itszuvalex) on 10/11/14.
*/
object ItemStackImplicits {
implicit class ItemStackImplicits(i: ItemStack) {
def toModQualifiedString: String = StringUtils.itemStackToString(i)
}
implicit class StringItemStackImplicits(i: String) {
def toItemStack: ItemStack = StringUtils.itemStackFromString(i)
}
implicit class ItemStackArrayImplicits(i: Array[ItemStack]) {
def deepCopy: Array[ItemStack] = i.map(f => if (f == null) null else f.copy)
}
implicit class ItemStackOreDictionaryComparison(item: ItemStack) {
def ==(oreDictionary: String) = isOre(oreDictionary)
def isOre(oreDictionary: String) = OreDictionary.getOres(oreDictionary)
.exists(ItemStack.areItemStacksEqual(_, item))
}
implicit class ForcedNBT(i: ItemStack) {
def forceTag: NBTTagCompound = {
if (!i.hasTagCompound)
i.stackTagCompound = new NBTTagCompound
i.getTagCompound
}
}
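  /* Illustrative use (a sketch; `stack` and the "charge" key are hypothetical):
   *
   *   import ItemStackImplicits._
   *   val tag = stack.forceTag        // lazily creates the stack's NBT tag
   *   tag.setInteger("charge", 42)    // safe: forceTag guarantees a tag exists
   */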
}
| BlockWorker/ItszuLib | src/main/scala/com/itszuvalex/itszulib/implicits/ItemStackImplicits.scala | Scala | gpl-2.0 | 1,250 |
package com.tajpure.scheme.compiler.parser
import com.tajpure.scheme.compiler.Constants
import com.tajpure.scheme.compiler.Scope
import com.tajpure.scheme.compiler.ast.Argument
import com.tajpure.scheme.compiler.ast.Block
import com.tajpure.scheme.compiler.ast.Call
import com.tajpure.scheme.compiler.ast.Define
import com.tajpure.scheme.compiler.ast.Func
import com.tajpure.scheme.compiler.ast.If
import com.tajpure.scheme.compiler.ast.Let
import com.tajpure.scheme.compiler.ast.Node
import com.tajpure.scheme.compiler.ast.Symbol
import com.tajpure.scheme.compiler.ast.Tuple
import com.tajpure.scheme.compiler.exception.ParserException
object Parser {
@throws(classOf[ParserException])
def parse(_path: String): Node = {
val preParser: PreParser = new PreParser(_path)
val preNode: Node = preParser.parse()
parseNode(preNode)
}
@throws(classOf[ParserException])
def parse(_source: String, _path: String): Node = {
val preParser: PreParser = new PreParser(_source, _path)
val preNode: Node = preParser.parse()
parseNode(preNode)
}
@throws(classOf[ParserException])
def parseNode(preNode: Node): Node = {
if (!preNode.isInstanceOf[Tuple]) {
preNode
}
else {
val tuple: Tuple = preNode.asInstanceOf[Tuple]
val elements: List[Node] = tuple.elements
if (elements.isEmpty) {
throw new ParserException("syntax error", tuple)
}
else {
val curNode: Node = elements(0)
if (curNode.isInstanceOf[Symbol]) {
curNode.asInstanceOf[Symbol].id match {
case Constants.DEFINE => parseDefine(tuple)
case Constants.IF => parseIf(tuple)
case Constants.LET => parseLet(tuple)
case Constants.LAMBDA => parseLambda(tuple)
case Constants.BEGIN => parseBlock(tuple)
case default => parseCall(tuple)
}
}
else {
parseCall(tuple)
}
}
}
}
@throws(classOf[ParserException])
def parseBlock(tuple: Tuple): Node = {
val elements: List[Node] = tuple.elements
val statements = parseList(elements.slice(1, elements.size))
new Block(statements, tuple)
}
@throws(classOf[ParserException])
def parseDefine(tuple: Tuple): Node = {
val elements: List[Node] = tuple.elements
if (elements.size < 3) {
throw new ParserException("incorrect format of definition", tuple)
}
else {
if (elements(1).isInstanceOf[Tuple]) {
val funcTuple = elements(1).asInstanceOf[Tuple]
val funcElements = funcTuple.elements
val pattern: Node = parseNode(funcElements(0))
val paramsTuple = new Tuple(funcElements.slice(1, funcElements.size), funcTuple)
val lambdaElements = elements.slice(2, elements.size).:::(List(Symbol.genSymbol(Constants.LAMBDA), paramsTuple))
val lambdaTuple = new Tuple(lambdaElements, funcTuple)
val value: Node = parseNode(lambdaTuple)
new Define(pattern, value, tuple)
} else {
val pattern: Node = parseNode(elements(1))
val value: Node = parseNode(elements(2))
new Define(pattern, value, tuple)
}
}
}
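  /* For example (illustrative): the function-definition sugar
   *
   *   (define (square x) (* x x))
   *
   * is rewritten above into (define square (lambda (x) (* x x))) before being
   * turned into a Define node, so both forms share one code path.
   */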
@throws(classOf[ParserException])
def parseIf(tuple: Tuple): If = {
val elements: List[Node] = tuple.elements
if (elements.size < 3 || elements.size > 4) {
throw new ParserException("incorrect format of if", tuple)
}
val test: Node = parseNode(elements(1))
val then: Node = parseNode(elements(2))
val _else: Node = if (elements.size == 4) {
parseNode(elements(3))
} else {
null
}
new If(test, then, _else, tuple)
}
@throws(classOf[ParserException])
def parseLet(tuple: Tuple): Node = {
val elements = tuple.elements
if (elements.size < 3) {
throw new ParserException("incorrect format of let", tuple)
}
val bindings =
if (elements(1).isInstanceOf[Tuple]) {
parseBindings(elements(1).asInstanceOf[Tuple])
} else {
throw new ParserException("incorrect format of bindings", tuple)
}
val statements = parseList(elements.slice(2, elements.size))
val start: Int = statements(0).start
val end: Int = statements(statements.size - 1).end
val body: Block = new Block(statements, tuple.file, start, end, tuple.row, tuple.col)
new Let(bindings, body, tuple)
}
@throws(classOf[ParserException])
def parseBindings(tuple: Tuple): List[Node] = {
val elements = tuple.elements
elements.map { element => {
if (element.isInstanceOf[Tuple]) {
val origin = element.asInstanceOf[Tuple]
val define = new Tuple(Symbol.genSymbol(Constants.DEFINE)::origin.elements, origin.open, origin.close, origin)
parseDefine(define)
}
else {
throw new ParserException("incorrect format of bindings", tuple)
}
} }
}
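  /* For example (illustrative): in (let ((x 1) (y 2)) (+ x y)) each binding
   * (x 1) is rewritten into (define x 1), so `let` reuses the `define`
   * machinery before the body is wrapped in a Block.
   */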
@throws(classOf[ParserException])
def parseLambda(tuple: Tuple): Node = {
val elements: List[Node] = tuple.elements
if (elements.size < 3) {
throw new ParserException("incorrect format of function", tuple)
}
val params: Node = elements(1)
    // the parameters must be a Symbol or a Tuple of Symbols
if (params.isInstanceOf[Tuple]) {
params.asInstanceOf[Tuple].elements.map { node =>
if (!node.isInstanceOf[Symbol]) {
throw new ParserException("can't pass as an argument:" + node.toString(), node)
}
}
} else {
if (!params.isInstanceOf[Symbol]) {
throw new ParserException("can't pass as an argument:" + params.toString(), params)
}
}
val statements: List[Node] = parseList(elements.slice(2, elements.size))
val start: Int = statements(0).start
val end: Int = statements(statements.size - 1).end
val body: Block = new Block(statements, tuple.file, start, end, tuple.row, tuple.col)
val properties: Scope = null
new Func(params, properties, body, tuple)
}
@throws(classOf[ParserException])
def parseCall(tuple: Tuple): Node = {
val elements: List[Node] = tuple.elements
val func: Node = parseNode(tuple.elements(0))
val parsedArgs: List[Node] = parseList(elements.slice(1, elements.size))
val argument: Argument = new Argument(parsedArgs)
new Call(func, argument, tuple)
}
@throws(classOf[ParserException])
def parseList(preNodes: List[Node]): List[Node] = {
preNodes.map { node => parseNode(node) }
}
// parse("./src/test/resources/scheme/helloworld.scm").interp(Scope.buildInitScope())
def parseSource(source: String): String = {
parse(source, "/visual").toString()
}
} | tajpure/SoScheme | src/main/scala/com/tajpure/scheme/compiler/parser/Parser.scala | Scala | gpl-3.0 | 6,686 |
package org.kokho.scheduling.rts.multicritical
import org.kokho.scheduling.{PeriodicTask, SynchronousTask, ImplicitDeadlineTask, Task}
/**
* Created with IntelliJ IDEA on 5/28/15.
 * @author Mikhail Kokho
*/
trait MulticriticalTask extends Task
with ImplicitDeadlineTask
{
}
| mkokho/dynoslack | old_sources/main/scala/kokho/scheduling/rts/multicritical/MulticriticalTask.scala | Scala | apache-2.0 | 282 |
package edu.gemini.spModel.rich.pot.spdb
import edu.gemini.pot.sp.{ISPProgram, ISPNode}
import edu.gemini.pot.spdb.{IDBDatabaseService, DBAbstractQueryFunctor}
import scala.collection.mutable.ListBuffer
import java.security.Principal
/**
 * Creates a list of science programs from the database. Restricted to this
 * package in order to ensure that it is used inside a functor.
*/
private[spdb] class OdbProgramList extends DBAbstractQueryFunctor with Serializable {
val lst = ListBuffer.empty[ISPProgram]
def result: List[ISPProgram] =
lst.toList
def execute(db: IDBDatabaseService, node: ISPNode, ps: java.util.Set[Principal]): Unit =
lst.append(node.asInstanceOf[ISPProgram])
}
object OdbProgramList {
def apply(db: IDBDatabaseService, user: java.util.Set[Principal]): List[ISPProgram] = {
val qr = db.getQueryRunner(user)
val funIn = new OdbProgramList
val funOut = qr.queryPrograms(funIn).asInstanceOf[OdbProgramList]
funOut.result
}
}
| arturog8m/ocs | bundle/edu.gemini.pot/src/main/scala/edu/gemini/spModel/rich/pot/spdb/OdbProgramList.scala | Scala | bsd-3-clause | 1,007 |
package org.scalatra
import java.io.{ ByteArrayOutputStream, ObjectOutputStream }
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import org.scalatest.{ BeforeAndAfterEach, FunSuite, Matchers }
import skinny.micro.contrib.flash.FlashMap
@RunWith(classOf[JUnitRunner])
class FlashMapTest extends FunSuite with Matchers with BeforeAndAfterEach {
var flash: FlashMap = _
override def beforeEach = flash = new FlashMap()
test("values are visible immmediately") {
flash("foo") = "bar"
flash.get("foo") should equal(Some("bar"))
}
test("gotten values are removed on sweep") {
flash("foo") = "bar"
flash.get("foo")
flash.get("foo") should equal(Some("bar"))
flash.sweep()
flash.get("foo") should equal(None)
}
test("ungotten values are not removed on sweep") {
flash("foo") = "bar"
flash.sweep()
flash.get("foo") should equal(Some("bar"))
}
test("values are overwritten immediately") {
flash("foo") = "bar"
flash.sweep()
flash.get("foo") should equal(Some("bar"))
flash("foo") = "baz"
flash.get("foo") should equal(Some("baz"))
}
test("values overwritten since last gotten are not removed on sweep") {
flash("foo") = "bar"
flash.get("foo")
flash("foo") = "baz"
flash.sweep()
flash.get("foo") should equal(Some("baz"))
}
test("gotten keys are not remembered across sweeps") {
flash("foo") = "bar"
flash.get("foo")
flash.sweep()
flash("foo") = "baz"
flash.sweep()
flash.get("foo") should equal(Some("baz"))
}
test("values are removed immediately") {
flash("foo") = "bar"
flash -= "foo"
flash.get("foo") should equal(None)
}
test("iterates over previously and currently added keys") {
flash("one") = 1
flash("two") = 2
flash.sweep()
flash("three") = 3
flash.toSet should equal(Set("one" -> 1, "two" -> 2, "three" -> 3))
}
test("iterated keys are removed on sweep") {
val keys = Set("1", "2")
keys foreach { k => flash(k) = true }
// Iteration order is unspecified
val (gottenKey, _) = flash.iterator.next
val ungottenKey = (keys - gottenKey).head
flash.sweep()
flash.get(gottenKey) should equal(None)
flash.get(ungottenKey) should equal(Some(true))
}
test("keep without arguments retains used keys through one sweep") {
flash("1") = "one"
flash("2") = "two"
flash.get("1")
flash.keep()
flash.sweep()
flash.get("1") should equal(Some("one"))
flash.get("2") should equal(Some("two"))
flash.sweep()
flash.get("1") should equal(None)
}
test("keep with an argument retains just those keys through one sweep") {
flash("1") = "one"
flash("2") = "two"
flash("3") = "three"
flash.get("1")
flash.get("2")
flash.get("3")
flash.keep("1")
flash.keep("3")
flash.sweep()
flash.get("1") should equal(Some("one"))
flash.get("2") should equal(None)
flash.get("3") should equal(Some("three"))
flash.sweep()
flash.get("1") should equal(None)
flash.get("3") should equal(None)
}
test("values set with now are visible immediately") {
flash.now("foo") = "baz"
flash.get("foo") should equal(Some("baz"))
}
test("ungotten values set with now are removed on sweep") {
flash.now("foo") = "baz"
flash.sweep()
flash.get("foo") should equal(None)
}
test("supports symbols as keys") {
flash("foo") = "bar"
flash.sweep()
flash('foo) should equal("bar")
}
test("is serializable") {
flash("foo") = "bar"
val out = new ObjectOutputStream(new ByteArrayOutputStream)
out.writeObject(flash)
}
test("flag marks all ungotten entries for sweeping") {
flash("one") = 1
flash.flag()
flash.sweep()
flash.get("one") should equal(None)
}
test("flag does not apply to entries added after flagging") {
flash.flag()
flash("one") = 1
flash.sweep()
flash.get("one") should equal(Some(1))
}
}
| xerial/skinny-micro | micro/src/test/scala/org/scalatra/FlashMapTest.scala | Scala | bsd-2-clause | 3,987 |
package com.datawizards.dqm.result
case class ValidationResult(
invalidRecords: Seq[InvalidRecord],
tableStatistics: TableStatistics,
columnsStatistics: Seq[ColumnStatistics],
groupByStatisticsList: Seq[GroupByStatistics] = Seq.empty,
invalidGroups: Seq[InvalidGroup] = Seq.empty,
invalidTableTrends: Seq[InvalidTableTrend] = Seq.empty
)
| piotr-kalanski/data-quality-monitoring | src/main/scala/com/datawizards/dqm/result/ValidationResult.scala | Scala | apache-2.0 | 540 |
package com.jasperdenkers.play.auth
import play.api.mvc.{Request, WrappedRequest}
class AuthenticatedRequest[A, B](val identity: A, val request: Request[B]) extends WrappedRequest[B](request)
object AuthenticatedRequest {
def apply[A, B](identity: A, request: Request[B]) = new AuthenticatedRequest(identity, request)
def unapply[A, B](authenticatedRequest: AuthenticatedRequest[A, B]) =
Some(authenticatedRequest.identity, authenticatedRequest.request)
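  /* Illustrative pattern match enabled by unapply (a sketch):
   *
   *   request match {
   *     case AuthenticatedRequest(identity, underlying) => ???  // identity: A
   *   }
   */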
}
| jasperdenkers/play-auth | core/app/com/jasperdenkers/play/auth/AuthenticatedRequest.scala | Scala | mit | 470 |
package com.typesafe.slick.testkit.tests
import com.typesafe.slick.testkit.util.{RelationalTestDB, AsyncTest}
import scala.concurrent.Future
class NestingTest extends AsyncTest[RelationalTestDB] {
import tdb.profile.api._
def testNestedTuples = {
import TupleMethods._
class T(tag: Tag) extends Table[(Int, String, String)](tag, "T") {
def a = column[Int]("A")
def b = column[String]("B")
def c = column[String]("C")
def * = (a, b, c)
}
val ts = TableQuery[T]
val res1 = List(
(1, "1", "a", 5), (2, "2", "a", 5), (3, "3", "a", 5),
(1, "1", "b", 5), (2, "2", "b", 5), (3, "3", "b", 5),
(1, "1", "c", 5), (2, "2", "c", 5), (3, "3", "c", 5)
)
val res1b = res1.map { case (a, b, c, d) => ((a, b), (c, d)) }
val q1a = (for {
(a, b) <- ts.map(t => (t.a, t.b))
c <- ts.map(t => t.c)
} yield a ~ b ~ c ~ 5).sortBy(t => t._3 ~ t._1)
val q1c = (for {
a ~ b <- ts.map(t => (t.a, t.b))
c <- ts.map(t => t.c)
} yield (a, b, c, LiteralColumn(5))).sortBy(t => t._3 ~ t._1)
val q1d = (for {
(a, b) <- ts.map(t => (t.a, t.b))
c <- ts.map(t => t.c)
} yield ((a, b), (c, 5))).sortBy(t => t._2._1 ~ t._1._1)
val res2 = Set((1, "1", 8), (2, "2", 10))
val q2a = for {
a ~ b ~ c <- ts.filter(_.a === 1).map(t => t.a ~ t.b ~ 4) unionAll ts.filter(_.a === 2).map(t => t.a ~ t.b ~ 5)
} yield a ~ b ~ (c*2)
val q2b = for {
(a, b, c) <- ts.filter(_.a === 1).map(t => (t.a, t.b, LiteralColumn(4))) unionAll ts.filter(_.a === 2).map(t => (t.a, t.b, LiteralColumn(5)))
} yield a ~ b ~ (c*2)
val q2c = for {
(a, b, c) <- ts.filter(_.a === 1).map(t => (t.a, t.b, 4)) unionAll ts.filter(_.a === 2).map(t => (t.a, t.b, 5))
} yield a ~ b ~ (c*2)
seq(
ts.schema.create,
ts ++= Seq((1, "1", "a"), (2, "2", "b"), (3, "3", "c")),
q1a.result.map(_ shouldBe res1),
q1c.result.map(_ shouldBe res1),
q1d.result.map(_ shouldBe res1b),
q2a.result.map(v => v.toSet shouldBe res2),
q2b.result.map(v => v.toSet shouldBe res2),
q2c.result.map(v => v.toSet shouldBe res2)
)
}
def testNestedOptions = {
class X(tag: Tag) extends Table[(Int, String, Option[Int])](tag, "X_OPT") {
def a = column[Int]("A")
def b = column[String]("B")
def c = column[Option[Int]]("C")
def * = (a, b, c)
}
val xs = TableQuery[X]
val q = xs.sortBy(_.a)
val r = Vector((1, "1", Some(1)), (2, "2", Some(2)), (3, "3", None))
val setup = xs.schema.create >> (xs ++= r)
// Construct all kinds of Option Shapes
implicitly[Shape[_, Rep[Int], _, _]]
implicitly[Shape[_, Rep[Option[Int]], _, _]]
implicitly[Shape[_, Rep[Option[Option[Int]]], _, _]]
implicitly[Shape[_, Rep[Option[(Rep[Int], Rep[String])]], _, _]]
implicitly[Shape[_, Rep[Option[X]], _, _]]
// Construct all different kinds of Options
val q1 = q.map(t => Rep.Some(t))
val q1a2 = q.map(t => Rep.Some(Rep.Some(t)))
val q2 = q.map(t => Rep.Some(t.a))
val q2a2 = q.map(t => Rep.Some(Rep.Some(t.a)))
val q3 = q.map(t => t.c)
val q4 = q.map(t => Rep.Some(t.c))
val q5 = q.map(t => (t.c, Rep.Some(t.b)))
val q1t: Query[Rep[Option[X]], _, Seq] = q1
val q1a2t: Query[Rep[Option[Option[X]]], _, Seq] = q1a2
val q2t: Query[Rep[Option[Int]], _, Seq] = q2
val q2a2t: Query[Rep[Option[Option[Int]]], _, Seq] = q2a2
val q3t: Query[Rep[Option[Int]], _, Seq] = q3
val q4t: Query[Rep[Option[Option[Int]]], _, Seq] = q4
val q5t: Query[(Rep[Option[Int]], Rep[Option[String]]), _, Seq] = q5
lazy val t1 = seq(
mark("q1", q1.result).map(_ shouldBe r.map(t => Some(t))),
mark("q1a2", q1a2.result).map(_ shouldBe r.map(t => Some(Some(t)))),
mark("q2", q2.result).map(_ shouldBe r.map(t => Some(t._1))),
mark("q2a2", q2a2.result).map(_ shouldBe r.map(t => Some(Some(t._1)))),
mark("q3", q3.result).map(_ shouldBe r.map(t => t._3)),
mark("q4", q4.result).map(_ shouldBe r.map(t => Some(t._3))),
mark("q5", q5.result).map(_ shouldBe r.map(t => (t._3, Some(t._2))))
)
// Get plain values out
val q1b = q1.map(_.map(x => (x.a, x.b, x.c)).getOrElse((0, "", None: Option[Int])))
val q2b = q2.map(_.get)
val q3b = q3.filter(_.isDefined).map(_.get)
val q4b = q4.map(_.getOrElse(None: Option[Int]))
val q1bt: Query[(Rep[Int], Rep[String], Rep[Option[Int]]), _, Seq] = q1b
val q2bt: Query[Rep[Int], _, Seq] = q2b
val q3bt: Query[Rep[Int], _, Seq] = q3b
val q4bt: Query[Rep[Option[Int]], _, Seq] = q4b
lazy val t2 = seq(
mark("q1b", q1b.result).map(_ shouldBe r.map(t => Some(t)).map(_.getOrElse((0, "", None: Option[String])))),
mark("q2b", q2b.result).map(_ shouldBe r.map(t => Some(t._1)).map(_.get)),
mark("q3b", q3b.result).map(_ shouldBe r.map(t => t._3).filter(_.isDefined).map(_.get)),
mark("a4b", q4b.result).map(_ shouldBe r.map(t => Some(t._3)).map(_.getOrElse(None: Option[String])))
)
// Unpack result types
def r1: Future[Seq[Option[(Int, String, Option[Int])]]] = db.run(q1.result)
def r2: Future[Seq[Option[Int]]] = db.run(q2.result)
def r3: Future[Seq[Option[Int]]] = db.run(q3.result)
def r2b: Future[Seq[Int]] = db.run(q2b.result)
def r3b: Future[Seq[Int]] = db.run(q3b.result)
// Perform Option-mapped operations
val q2c = q2.map(io => io + 42)
val q3c = q3.map(so => so + 10)
lazy val t3 = seq(
mark("q2c", q2c.result).map(_ shouldBe r.map(t => Some(t._1)).map(_.map(_ + 42))),
mark("q3c", q3c.result).map(_ shouldBe r.map(t => t._3).map(_.map(_ + 10)))
)
// Use Option.map
val q1d = q1.map(_.map(_.a))
val q1d2 = q1.map(_.map(x => (x.a, x.b, x.c)))
val q2d = q2.map { io: Rep[Option[Int]] =>
io.map { i: Rep[Int] =>
i + 1
}
}
val q3d = q3.map(_.map(s => (s, s, 1)))
val q4d = q4.map(_.filter(_.isDefined).map(_.getOrElse(0)))
val q1dt: Query[Rep[Option[Int]], _, Seq] = q1d
val q1d2t: Query[Rep[Option[(Rep[Int], Rep[String], Rep[Option[Int]])]], _, Seq] = q1d2
val q2dt: Query[Rep[Option[Int]], _, Seq] = q2d
val q3dt: Query[Rep[Option[(Rep[Int], Rep[Int], ConstColumn[Int])]], _, Seq] = q3d
val q4dt: Query[Rep[Option[Int]], _, Seq] = q4d
lazy val t4 = seq(
q1d.result.named("q1d").map(_ shouldBe r.map(t => Some(t)).map(_.map(_._1))),
q1d2.result.named("q1d2").map(_ shouldBe r.map(t => Some(t)).map(_.map(x => (x._1, x._2, x._3)))),
q2d.result.named("q2d").map(_ shouldBe r.map(t => Some(t._1)).map(_.map(_ + 1))),
q3d.result.named("q3d").map(_ shouldBe r.map(t => t._3).map(_.map(s => (s, s, 1)))),
q4d.result.named("q4d").map(_ shouldBe r.map(t => Some(t._3)).map(_.filter(_.isDefined).map(_.get)))
)
// Use Option.flatMap
val q1e1 = q1.map { to => to.flatMap { t => Rep.Some(t.b) }}
val q1e2 = q1.map { to => to.flatMap { t => t.c }}
val q1e3 = q1.map(to => Rep.Some(to)).map(_.flatMap(identity))
val q2e = q2.map { io => io.flatMap { i => Rep.Some(i) }}
val q1e1t: Query[Rep[Option[String]], _, Seq] = q1e1
val q1e2t: Query[Rep[Option[Int]], _, Seq] = q1e2
val q2et: Query[Rep[Option[Int]], _, Seq] = q2e
lazy val t5 = seq(
mark("q1e1", q1e1.result).map(_ shouldBe r.map(t => Some(t)).map { to => to.flatMap { t => Some(t._2) }}),
mark("q1e2", q1e2.result).map(_ shouldBe r.map(t => Some(t)).map { to => to.flatMap { t => t._3 }}),
mark("q1e3", q1e3.result).map(_ shouldBe r.map(t => Some(t)).map(to => Some(to)).map(_.flatMap(identity))),
mark("q2e", q2e.result).map(_ shouldBe r.map(t => Some(t._1)).map { io => io.flatMap { i => Some(i) }})
)
// Use Option.flatten
val q1f1 = q1.map { to => Rep.Some(to) }
val q1f2 = q1.map { to => Rep.Some(to).flatten }
val q1f3 = q1.map { to => Rep.Some(to) }.map(_.flatten)
val q2f1 = q2.map { io => Rep.Some(io) }
val q2f2 = q2.map { io => Rep.Some(io).flatten }
val q2f3 = q2.map { io => Rep.Some(io) }.map(_.flatten)
val q1f1t: Query[Rep[Option[Option[X]]], _, Seq] = q1f1
val q1f2t: Query[Rep[Option[X]], _, Seq] = q1f2
val q1f3t: Query[Rep[Option[X]], _, Seq] = q1f3
val q2f1t: Query[Rep[Option[Option[Int]]], _, Seq] = q2f1
val q2f2t: Query[Rep[Option[Int]], _, Seq] = q2f2
val q2f3t: Query[Rep[Option[Int]], _, Seq] = q2f3
lazy val t6 = seq(
q1f1.result.named("q1f1").map(_ shouldBe Vector(Some(Some((1,"1",Some(1)))), Some(Some((2,"2",Some(2)))), Some(Some((3,"3",None))))),
q1f2.result.named("q1f2").map(_ shouldBe r.map(t => Some(t)).map { to => Some(to).flatten }),
q1f3.result.named("q1f3").map(_ shouldBe r.map(t => Some(t)).map { to => Some(to) }.map(_.flatten)),
q2f1.result.named("q2f1").map(_ shouldBe r.map(t => Some(t._1)).map { io => Some(io) }),
q2f2.result.named("q2f2").map(_ shouldBe r.map(t => Some(t._1)).map { io => Some(io).flatten }),
q2f3.result.named("q2f3").map(_ shouldBe r.map(t => Some(t._1)).map { io => Some(io) }.map(_.flatten))
)
setup >> t1 >> t2 >> t3 >> t4 >> t5 >> t6
}
def testGetOrElse = {
case class Chord(name: String, popularOptions: String, id: Long = -1L)
class Chords(tag: Tag) extends Table[Chord](tag, "chords") {
def id = column[Long]("id", O.PrimaryKey, O.AutoInc)
def name = column[Option[String]]("name")
def popularOptions = column[Option[String]]("popularOptions")
def * = (name.getOrElse(""), popularOptions.getOrElse(""), id).mapTo[Chord]
}
val chords = TableQuery[Chords]
val allChords = Set(Chord("maj7", "9 #11"), Chord("m7", "9 11"), Chord("7", "9 13"), Chord("m7b5", "11"), Chord("aug7", "9"), Chord("dim7", ""))
val minorChords = for {
chord <- chords if chord.name.startsWith("m7")
} yield (chord.name.getOrElse(""), chord.popularOptions.getOrElse(""))
val otherChords = for {
chord <- chords if !chord.name.startsWith("m7")
} yield (chord.name.getOrElse(""), chord.popularOptions.getOrElse(""))
DBIO.seq(
chords.schema.create,
chords ++= allChords,
(minorChords ++ otherChords).result.map(_.toSet shouldBe allChords.map(c => (c.name, c.popularOptions)))
)
}
}
| AtkinsChang/slick | slick-testkit/src/main/scala/com/typesafe/slick/testkit/tests/NestingTest.scala | Scala | bsd-2-clause | 10,299 |
/*
* Copyright (C) 2007-2008 Artima, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Example code from:
*
* Programming in Scala (First Edition, Version 6)
* by Martin Odersky, Lex Spoon, Bill Venners
*
* http://booksites.artima.com/programming_in_scala
*/
trait SimpleFoods {
object Pear extends Food("Pear")
def allFoods = List(Apple, Pear)
def allCategories = Nil
}
| peachyy/scalastu | modules/SimpleFoods.scala | Scala | apache-2.0 | 928 |
package com.chrisomeara.pillar.core
import java.io.{File, FileInputStream}
import java.util.Date
object Registry {
def apply(migrations: Seq[Migration]): Registry = {
new Registry(migrations)
}
def fromDirectory(directory: File, reporter: Reporter): Registry = {
new Registry(parseMigrationsInDirectory(directory).map(new ReportingMigration(reporter, _)))
}
def fromDirectory(directory: File): Registry = {
new Registry(parseMigrationsInDirectory(directory))
}
def fromFiles(files: Seq[File]): Registry = {
new Registry(parseMigrationsInFiles(filterExisting(files)))
}
def fromFiles(files: Seq[File], reporter: Reporter): Registry = {
new Registry(
parseMigrationsInFiles(filterExisting(files))
.map(new ReportingMigration(reporter, _))
)
}
private def filterExisting(files : Seq[File]) : Seq[File] = {
files
.filterNot(file => file.isDirectory)
.filter(file => file.exists())
}
private def parseMigrationsInFiles(files: Seq[File]): Seq[Migration] = {
val parser = Parser()
files.map {
file =>
val stream = new FileInputStream(file)
try {
parser.parse(stream)
} finally {
stream.close()
}
}.toList
}
private def parseMigrationsInDirectory(directory: File): Seq[Migration] = {
if (!directory.isDirectory)
return List.empty
parseMigrationsInFiles(directory.listFiles())
}
}
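/* Illustrative use (a sketch; the directory path is hypothetical):
 *
 *   val registry = Registry.fromDirectory(new File("conf/pillar/migrations"))
 *   registry.all.foreach(m => println(m.key))  // migrations sorted by authoredAt
 */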
class Registry(private var migrations: Seq[Migration]) {
migrations = migrations.sortBy(_.authoredAt)
private val migrationsByKey = migrations.foldLeft(Map.empty[MigrationKey, Migration]) {
(memo, migration) => memo + (migration.key -> migration)
}
def authoredBefore(date: Date): Seq[Migration] = {
migrations.filter(migration => migration.authoredBefore(date))
}
def apply(key: MigrationKey): Migration = {
migrationsByKey(key)
}
def all: Seq[Migration] = migrations
} | comeara/pillar-core | src/main/scala/com/chrisomeara/pillar/core/Registry.scala | Scala | mit | 1,956 |
/*                     __                                               *\
**     ________ ___   / /  ___     Scala API                            **
**    / __/ __// _ | / /  / _ |    (c) 2007-2013, LAMP/EPFL             **
**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
** /____/\___/_/ |_/____/_/ | |                                         **
**                          |/                                          **
\*                                                                      */
package scala
package reflect
/** One of the branches of an [[scala.reflect.OptManifest]].
*/
// TODO undeprecated until Scala reflection becomes non-experimental
// @deprecated("This notion doesn't have a corresponding concept in 2.10, because scala.reflect.runtime.universe.TypeTag can capture arbitrary types. Use type tags instead of manifests, and there will be no need in opt manifests.", "2.10.0")
object NoManifest extends OptManifest[Nothing] with Serializable {
override def toString = "<?>"
} | felixmulder/scala | src/library/scala/reflect/NoManifest.scala | Scala | bsd-3-clause | 1,034 |
/*
* Copyright 2012-2014 Comcast Cable Communications Management, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.comcast.xfinity.sirius.api.impl.paxos
import org.scalatest.BeforeAndAfterAll
import com.comcast.xfinity.sirius.NiceTest
import akka.testkit.{TestActorRef, TestProbe}
import akka.actor.{ActorContext, ActorRef, ActorSystem}
import com.comcast.xfinity.sirius.api.impl.paxos.PaxosMessages._
import scala.concurrent.duration._
import com.comcast.xfinity.sirius.api.impl.{Delete, Put}
import com.comcast.xfinity.sirius.api.SiriusConfiguration
import com.comcast.xfinity.sirius.util.AkkaExternalAddressResolver
import scala.language.postfixOps
class PaxosSupervisorTest extends NiceTest with BeforeAndAfterAll {
implicit val actorSystem = ActorSystem("PaxosSupTest")
def createPaxosSup(leader: ActorRef = TestProbe().ref,
acceptor: ActorRef = TestProbe().ref,
replica: ActorRef = TestProbe().ref)
(implicit actorSystem: ActorSystem): ActorRef = {
val testConfig = new SiriusConfiguration
testConfig.setProp(SiriusConfiguration.AKKA_EXTERNAL_ADDRESS_RESOLVER, AkkaExternalAddressResolver(actorSystem)(testConfig))
val childProvider = new PaxosSupervisor.ChildProvider(null, 0L, null, testConfig) {
override def createLeader()(implicit context: ActorContext) = leader
override def createAcceptor()(implicit context: ActorContext) = acceptor
override def createReplica(leader: ActorRef)(implicit context: ActorContext) = replica
}
TestActorRef(new PaxosSupervisor(childProvider))
}
override def afterAll(): Unit = {
actorSystem.terminate()
}
describe("A PaxosSup") {
it ("must properly forward messages to its children") {
val leaderProbe = TestProbe()
val acceptorProbe = TestProbe()
val replicaProbe = TestProbe()
val paxosSup = createPaxosSup(leaderProbe.ref, acceptorProbe.ref, replicaProbe.ref)
val senderProbe = TestProbe()
val decision = Decision(1, Command(null, 1, Delete("2")))
senderProbe.send(paxosSup, decision)
replicaProbe.expectMsg(decision)
assert(senderProbe.ref === replicaProbe.lastSender)
val propose = Propose(1, Command(null, 1, Delete("2")))
senderProbe.send(paxosSup, propose)
leaderProbe.expectMsg(propose)
assert(senderProbe.ref === leaderProbe.lastSender)
val decisionHint = DecisionHint(1L)
senderProbe.send(paxosSup, decisionHint)
replicaProbe.expectMsg(decisionHint)
assert(senderProbe.ref === replicaProbe.lastSender)
val phase1A = Phase1A(senderProbe.ref, Ballot(1, "a"), senderProbe.ref,1L)
senderProbe.send(paxosSup, phase1A)
acceptorProbe.expectMsg(phase1A)
assert(senderProbe.ref === acceptorProbe.lastSender)
val phase2A = Phase2A(senderProbe.ref, PValue(Ballot(1, "a"), 1,
Command(null, 1, Delete("2"))), senderProbe.ref)
senderProbe.send(paxosSup, phase2A)
acceptorProbe.expectMsg(phase2A)
assert(senderProbe.ref === acceptorProbe.lastSender)
}
it ("must properly translate a NonCommutativeSiriusRequest" +
" to a Request and forward it into the system") {
val replicaProbe = TestProbe()
val paxosSup = createPaxosSup(replica = replicaProbe.ref)
val senderProbe = TestProbe()
val delete = Delete("a")
senderProbe.send(paxosSup, delete)
replicaProbe.receiveOne(1 second) match {
case Request(Command(sender, ts, req)) =>
assert(senderProbe.ref === sender)
// accept some tolerance on timestamp
assert(System.currentTimeMillis() - ts < 5000)
assert(req === delete)
}
val put = Put("a", "bc".getBytes)
senderProbe.send(paxosSup, put)
replicaProbe.receiveOne(1 second) match {
case Request(Command(sender, ts, req)) =>
assert(senderProbe.ref === sender)
// accept some tolerance on timestamp
assert(System.currentTimeMillis() - ts < 5000)
assert(req === put)
}
}
}
}
| Comcast/sirius | src/test/scala/com/comcast/xfinity/sirius/api/impl/paxos/PaxosSupervisorTest.scala | Scala | apache-2.0 | 4,650 |
package org.backuity.puppet
import java.io.{File, FileNotFoundException}
import java.nio.file.{StandardCopyOption, CopyOption, Files, Path}
trait Git {
/** @return true if dir is a git repository */
def isGit(dir: Path) : Boolean
def update(source: String, ref: Option[String], destination: Path): Unit
def clone(source: String, ref: Option[String], destination: Path) : Unit
def currentBranch(dir: Path): String
/** @throws FileNotFoundException */
@throws(clazz = classOf[FileNotFoundException])
def downloadFile(fileName: String, uri: String, tag: Option[String]): Path
def lsRemoteTags(uri: String) : String
/** @see [[Version]] */
def latestTag(uri: String)(implicit log: Logger) : Option[String] = Git.latestVersion(lsRemoteTags(uri))
  /** @return the highest tag for a given major version */
def latestTag(uri: String, forMajor: Int)(implicit log: Logger) : Option[String] = {
Git.latestVersion(lsRemoteTags(uri), forMajor)
}
def currentRef(dir: Path) : Git.Ref
/** @return true if the git repo at `dir` is dirty, that is,
   * if it contains uncommitted changes
*/
def isDirty(dir: Path) : Boolean
}
object Git {
def tagsToVersions(gitOutput: String)(implicit log: Logger) : List[(String,Version.MajorMinorBugFix)] = {
gitOutput.split("\\n").flatMap { line =>
if (line.contains("refs/tags")) {
val tag = line.split("refs/tags/")(1)
if (!tag.endsWith("^{}") && tag.matches(".*[0-9]+.*")) {
try {
Some(tag,Version(tag))
} catch {
case e : IllegalArgumentException =>
log.warn(e.getMessage)
None
}
} else {
None
}
} else {
None
}
}.toList
}
def latestVersion(gitOutput: String, forMajor: Int)(implicit log: Logger) : Option[String] = {
tagsToVersions(gitOutput).filter( _._2.major == forMajor) match {
case Nil => None
case lst => Some(lst.maxBy( _._2)._1)
}
}
def latestVersion(gitOutput: String)(implicit log: Logger) : Option[String] = {
tagsToVersions(gitOutput) match {
case Nil => None
case lst => Some(lst.maxBy( _._2)._1)
}
}
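  /* For example (illustrative ls-remote output): given
   *
   *   abc123  refs/tags/1.2.0
   *   def456  refs/tags/1.10.1
   *   def456  refs/tags/1.10.1^{}
   *
   * tagsToVersions drops the peeled "^{}" entry, and latestVersion returns
   * Some("1.10.1") by comparing parsed versions rather than raw strings.
   */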
sealed abstract class Ref
case class Tag(name: String) extends Ref
case class Branch(name: String) extends Ref
case class Commit(hash: String) extends Ref
class Impl(shell: Shell) extends Git {
private val tmpDir = Files.createTempDirectory("pmi")
def isGit(dir: Path) : Boolean = {
Files.isDirectory(dir.resolve(".git"))
}
def isDirty(dir: Path) : Boolean = {
! shell.exec("git status --porcelain", dir).trim.isEmpty
}
def lsRemoteTags(uri: String) : String = {
shell.exec("git ls-remote --tags " + uri, new File("."))
}
def downloadFile(fileName: String, uri: String, tag: Option[String]) : Path = {
val downloadDir = Files.createTempDirectory(tmpDir, "git")
try {
shell.exec(s"git archive --format tar --remote=$uri -o $fileName.tar ${tag.getOrElse("HEAD")} $fileName", downloadDir)
} catch {
case e @ CommandException(_,_,_,msg) =>
// the error message varies depending on the git install
if( msg.contains("path not found") || msg.contains("did not match any files") ) {
throw new FileNotFoundException(s"$fileName in $uri")
} else {
throw e
}
}
shell.exec(s"tar -xf $fileName.tar", downloadDir)
downloadDir.resolve(fileName)
}
def clone(source: String, ref: Option[String], destination: Path): Unit = {
val branch = ref.map( r => " --branch " + r).getOrElse("")
shell.exec(s"git clone$branch $source .", destination)
}
def currentBranch(dir: Path): String = {
shell.exec("git rev-parse --abbrev-ref HEAD", dir).trim
}
def update(source: String, ref: Option[String], destination: Path): Unit = {
ref match {
case None =>
if( currentBranch(destination) != "master" ) {
shell.exec(s"git checkout master", destination)
}
shell.exec(s"git pull", destination)
case Some(r) =>
shell.exec(s"git fetch", destination)
shell.exec(s"git checkout $r", destination)
}
}
def currentRef(dir: Path): Ref = {
val branch = currentBranch(dir)
if( branch == "HEAD" ) {
try {
val tag = shell.exec("git describe --tags --exact-match", dir).trim
Tag(tag)
} catch {
case e : CommandException =>
Commit(shell.exec("git rev-parse HEAD", dir).trim)
}
} else {
Branch(branch)
}
}
}
}
| backuity/puppet-module-installer | src/main/scala/org/backuity/puppet/Git.scala | Scala | apache-2.0 | 4,702 |
package org.jetbrains.plugins.scala.worksheet.ui
import com.intellij.openapi.editor.impl.FoldingModelImpl
import com.intellij.openapi.editor.{Document, Editor, VisualPosition}
import com.intellij.openapi.project.Project
import com.intellij.openapi.util.text.StringUtil
import com.intellij.psi.PsiDocumentManager
import org.jetbrains.plugins.scala.extensions._
import org.jetbrains.plugins.scala.settings.ScalaProjectSettings
import org.jetbrains.plugins.scala.worksheet.ui.WorksheetDiffSplitters.SimpleWorksheetSplitter
/**
* User: Dmitry.Naydanov
* Date: 03.02.17.
*/
abstract class WorksheetEditorPrinterBase(protected val originalEditor: Editor,
protected val worksheetViewer: Editor) extends WorksheetEditorPrinter {
protected val viewerFolding: FoldingModelImpl = worksheetViewer.getFoldingModel.asInstanceOf[FoldingModelImpl]
protected implicit val project: Project = originalEditor.getProject
protected val originalDocument: Document = originalEditor.getDocument
protected val viewerDocument: Document = worksheetViewer.getDocument
protected lazy val group = new WorksheetFoldGroup(worksheetViewer, originalEditor, project, getWorksheetSplitter.orNull)
private var inited = false
def getViewerEditor: Editor = worksheetViewer
override def internalError(errorMessage: String): Unit = {
invokeLater {
inWriteAction {
simpleUpdate("Internal error: " + errorMessage, viewerDocument)
}
}
}
protected def getWorksheetSplitter: Option[SimpleWorksheetSplitter] =
Option(worksheetViewer.getUserData(WorksheetEditorPrinterFactory.DIFF_SPLITTER_KEY))
protected def getWorksheetViewersRation: Float =
getWorksheetSplitter.map(_.getProportion).getOrElse(WorksheetEditorPrinterFactory.DEFAULT_WORKSHEET_VIEWERS_RATIO)
protected def redrawViewerDiffs(): Unit = {
getWorksheetSplitter.foreach(_.redrawDiffs())
}
protected def saveEvaluationResult(result: String): Unit = {
WorksheetEditorPrinterFactory.saveWorksheetEvaluation(getScalaFile, result, getWorksheetViewersRation)
redrawViewerDiffs()
}
protected def cleanFoldings(): Unit = {
invokeLater {
viewerFolding.runBatchFoldingOperation(() => {
viewerFolding.clearFoldRegions()
})
worksheetViewer.getCaretModel.moveToVisualPosition(new VisualPosition(0, 0))
}
}
/**
   * @param foldings (Start output, End output, Input lines count, End input)*
*/
protected def updateFoldings(foldings: Seq[(Int, Int, Int, Int)]): Unit = startCommand() {
val isExpanded = !ScalaProjectSettings.getInstance(project).isWorksheetFoldCollapsedByDefault
viewerFolding.runBatchFoldingOperation(() => {
foldings.foreach {
case (start, end, limit, originalEnd) =>
val offset = originalDocument getLineEndOffset java.lang.Math.min(originalEnd, originalDocument.getLineCount)
val linesCount = viewerDocument.getLineNumber(end) - start - limit + 1
group.addRegion(viewerFolding, viewerDocument.getLineStartOffset(start + limit - 1), end,
offset, linesCount, limit, isExpanded)
}
WorksheetFoldGroup.save(getScalaFile, group)
}, false)
}
protected def isInited: Boolean = inited
protected def init(): Unit = {
inited = true
val oldSync = originalEditor getUserData WorksheetEditorPrinterFactory.DIFF_SYNC_SUPPORT
if (oldSync != null) oldSync.dispose()
group.installOn(viewerFolding)
WorksheetEditorPrinterFactory.synch(originalEditor, worksheetViewer, getWorksheetSplitter, Some(group))
cleanFoldings()
}
  protected def getNewLines(count: Int): String = StringUtil.repeatSymbol('\n', count)
protected def commitDocument(doc: Document): Unit = {
if (project.isDisposed) return //EA-70786
PsiDocumentManager.getInstance(project).commitDocument(doc)
}
protected def simpleUpdate(text: String, document: Document): Unit = {
document.setText(text)
commitDocument(document)
}
protected def simpleAppend(text: String, document: Document): Unit = {
document.insertString(document.getTextLength, text)
commitDocument(document)
}
protected def getOutputLimit: Int = ScalaProjectSettings.getInstance(project).getOutputLimit
}
| jastice/intellij-scala | scala/scala-impl/src/org/jetbrains/plugins/scala/worksheet/ui/WorksheetEditorPrinterBase.scala | Scala | apache-2.0 | 4,289 |
package org.jetbrains.sbt
package project.data
import com.intellij.openapi.externalSystem.model.{ProjectKeys, DataNode}
import com.intellij.openapi.externalSystem.model.project.ModuleData
import com.intellij.openapi.externalSystem.service.project.{ProjectStructureHelper, PlatformFacade}
import com.intellij.openapi.module.Module
import com.intellij.openapi.project.Project
import org.jetbrains.sbt.project.module.SbtModule
import collection.JavaConverters._
import java.util
/**
* @author Pavel Fatin
*/
class SbtModuleDataService(platformFacade: PlatformFacade, helper: ProjectStructureHelper)
extends AbstractDataService[SbtModuleData, Module](SbtModuleData.Key) {
def doImportData(toImport: util.Collection[DataNode[SbtModuleData]], project: Project) {
toImport.asScala.foreach { moduleNode =>
val moduleData = moduleNode.getData
val module = {
val moduleData: ModuleData = moduleNode.getData(ProjectKeys.MODULE)
helper.findIdeModule(moduleData.getName, project)
}
SbtModule.setImportsTo(module, moduleData.imports)
}
}
def doRemoveData(toRemove: util.Collection[_ <: Module], project: Project) {}
}
| consulo/consulo-scala | SBT/src/main/scala/org/jetbrains/sbt/project/data/SbtModuleDataService.scala | Scala | apache-2.0 | 1,168 |
package tests
import org.scalatest.FlatSpec
class NonParallelTest extends FlatSpec {
it should "Write Passing Tests" in {
}
it should "Write Failing Tests" in {
fail("Test failed")
}
}
| JetBrains/sbt-tc-logger | test/testdata/testsupport/parallelTestExecutionTW43578/src/src/test/scala/tests/NonParallelTest.scala | Scala | apache-2.0 | 203 |
package scaldi
import scala.util.Random
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec
class WordBinderSpec extends AnyWordSpec with Matchers {
"WordBinder" should {
"require to bind something" in {
val binder = new WordBinder {
bind[String] identifiedBy Symbol("host")
override def injector = ???
}
an[BindingException] should be thrownBy binder.wordBindings
}
"collect all identifiers for bindings" in {
val binder = new WordBinder {
bind[String] identifiedBy Symbol("host") and "httpServer" to "localhost"
bind[String] as Symbol("host") and "httpServer" to "localhost"
binding identifiedBy classOf[String] and Symbol("host") and "httpServer" to "localhost"
bind[String] to "localhost" identifiedBy Symbol("host") and "httpServer"
bind[String] to "localhost" as Symbol("host") and "httpServer"
binding to "localhost" identifiedBy classOf[String] and Symbol("host") and "httpServer"
override def injector = ???
}
binder.wordBindings should have size 7
binder.wordBindings filter (_.isDefinedFor(List(classOf[String], "host", "httpServer"))) should have size 6
}
"infer binding type only when it's not specified" in {
val binder = new WordBinder {
binding to new HttpServer("localhost", 80)
override def injector = ???
}
binder.wordBindings should have size 2
binder.wordBindings(1) isDefinedFor List(classOf[Server]) should be(true)
binder.wordBindings(1) isDefinedFor List(classOf[HttpServer]) should be(true)
}
"not infer binding type only when it is specified explicitly" in {
val binder = new WordBinder {
bind[Server] to new HttpServer("localhost", 80)
override def injector = ???
}
binder.wordBindings should have size 2
binder.wordBindings(1) isDefinedFor List(classOf[Server]) should be(true)
binder.wordBindings(1) isDefinedFor List(classOf[HttpServer]) should be(false)
}
"treat later bindings as overrides for earlier and more that one binding od the same type" in {
val binder = new DynamicModule {
bind[Server] to new HttpServer("localhost", 80)
bind[Server] to new HttpServer("www.test.com", 8080)
}.initNonLazy()
binder.wordBindings should have size 3
binder.getBinding(List(classOf[Server])).get.get should equal(Some(HttpServer("www.test.com", 8080)))
val bindings = binder.getBindings(List(classOf[Server]))
bindings should have size 2
bindings(0).get should equal(Some(HttpServer("www.test.com", 8080)))
bindings(1).get should equal(Some(HttpServer("localhost", 80)))
}
"allow to define normal lazy bindings that would be instantiated only one time" in {
var instanceCount = 0
val binder = new DynamicModule {
bind[Server] identifiedBy Symbol("server") and "httpServer" to {
instanceCount += 1
new HttpServer("localhost", Random.nextInt())
}
bind[Server] identifiedBy Symbol("otherServer") to HttpServer("test", 8080)
}.initNonLazy()
instanceCount should be(0)
(1 to 10).map(x => binder.getBinding(List("server")).get.get).distinct should have size 1
instanceCount should be(1)
binder.getBinding(List("otherServer")).get.get should equal(Some(HttpServer("test", 8080)))
}
"allow to define normal non-lazy bindings that would be instantiated only one time" in {
var instanceCount = 0
val binder = new DynamicModule {
bind[Server] identifiedBy Symbol("server") and "httpServer" toNonLazy {
instanceCount += 1
new HttpServer("localhost", Random.nextInt())
}
bind[Server] identifiedBy Symbol("otherServer") toNonLazy HttpServer("test", 8080)
}.initNonLazy()
instanceCount should be(1)
(1 to 10).map(x => binder.getBinding(List("server")).get.get).distinct should have size 1
instanceCount should be(1)
binder.getBinding(List("otherServer")).get.get should equal(Some(HttpServer("test", 8080)))
}
"allow to define provider bindings that would be instantiated each time" in {
var instanceCount = 0
val binder = new DynamicModule {
bind[Server] identifiedBy Symbol("server") and "httpServer" toProvider {
instanceCount += 1
new HttpServer("localhost", Random.nextInt())
}
bind[Server] identifiedBy Symbol("otherServer") toProvider HttpServer("test", 8080)
}.initNonLazy()
instanceCount should be(0)
(1 to 10).map(x => binder.getBinding(List("server")).get.get).distinct should have size 10
instanceCount should be(10)
binder.getBinding(List("otherServer")).get.get should equal(Some(HttpServer("test", 8080)))
}
"support conditions with 'when'" in {
var prodMode = true
var specialMode = true
val binder = new DynamicModule {
val ProdMode = Condition(prodMode)
val SpecialMode = Condition(specialMode)
val DevMode = !ProdMode
bind[String] as Symbol("host") when ProdMode to "www.prod-server.com"
bind[String] as Symbol("host") when DevMode to "localhost"
bind[Int] as Symbol("id") when ProdMode when SpecialMode to 123
bind[Int] when ProdMode as Symbol("port") to 1234
when(DevMode) {
bind[String] as Symbol("userName") to "testUser"
bind[Long] as Symbol("timeout") to 1000L
bind[String] when SpecialMode as Symbol("path") to "/index.html"
when(SpecialMode) {
bind[String] as Symbol("password") to "secret"
}
bind[Boolean] as Symbol("rememberUserName") to false
}
}
binder.wordBindings should have size 10
binder.getBinding(List(Symbol("host"))).get.get.get should equal("www.prod-server.com")
binder.getBinding(List(Symbol("port"))).get.get.get should equal(1234)
binder.getBinding(List(Symbol("userName"))) should be(empty)
binder.getBinding(List(Symbol("timeout"))) should be(empty)
binder.getBinding(List(Symbol("path"))) should be(empty)
binder.getBinding(List(Symbol("password"))) should be(empty)
binder.getBinding(List(Symbol("id"))).get.get.get should equal(123)
binder.getBinding(List(Symbol("rememberUserName"))) should be(empty)
specialMode = false
prodMode = false
binder.getBinding(List(Symbol("host"))).get.get.get should equal("localhost")
binder.getBinding(List(Symbol("port"))) should be(empty)
binder.getBinding(List(Symbol("userName"))).get.get.get should equal("testUser")
binder.getBinding(List(Symbol("timeout"))).get.get.get should be(1000L)
binder.getBinding(List(Symbol("path"))) should be(empty)
binder.getBinding(List(Symbol("password"))) should be(empty)
binder.getBinding(List(Symbol("id"))) should be(empty)
binder.getBinding(List(Symbol("rememberUserName"))).get.get.get should equal(false)
specialMode = true
binder.getBinding(List(Symbol("path"))).get.get.get should equal("/index.html")
binder.getBinding(List(Symbol("password"))).get.get.get should be("secret")
binder.getBinding(List(Symbol("id"))) should be(empty)
}
"allow to define init and destroy functions" in {
implicit val module = new DynamicModule {
bind[Server] as Symbol("server1") to new LifecycleServer initWith (_.init()) destroyWith (_.terminate())
bind[Server] as Symbol("server2") to new LifecycleServer initWith (_.init())
bind[Server] as Symbol("server3") to new LifecycleServer destroyWith (_.terminate())
}
import Injectable._
(1 to 3) foreach (i => inject[Server](s"server$i"))
val server1 = inject[Server](Symbol("server1")).asInstanceOf[LifecycleServer]
val server2 = inject[Server](Symbol("server2")).asInstanceOf[LifecycleServer]
val server3 = inject[Server](Symbol("server3")).asInstanceOf[LifecycleServer]
server1.initializedCount should equal(1)
server1.destroyedCount should equal(0)
server2.initializedCount should equal(1)
server2.destroyedCount should equal(0)
server3.initializedCount should equal(0)
server3.destroyedCount should equal(0)
module.destroy()
module.destroy()
module.destroy()
server1.initializedCount should equal(1)
server1.destroyedCount should equal(1)
server2.initializedCount should equal(1)
server2.destroyedCount should equal(0)
server3.initializedCount should equal(0)
server3.destroyedCount should equal(1)
}
}
}
| scaldi/scaldi | src/test/scala/scaldi/WordBinderSpec.scala | Scala | apache-2.0 | 8,749 |
/*
* Copyright 2012 Twitter Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.twitter.zipkin.query
import com.twitter.conversions.time._
import com.twitter.finagle.stats.{StatsReceiver, NullStatsReceiver}
import com.twitter.finagle.tracing.{Trace => FTrace}
import com.twitter.logging.Logger
import com.twitter.ostrich.admin.Service
import com.twitter.util.Future
import com.twitter.zipkin.conversions.thrift._
import com.twitter.zipkin.gen
import com.twitter.zipkin.query.adjusters.Adjuster
import com.twitter.zipkin.storage._
import java.nio.ByteBuffer
import java.util.concurrent.atomic.AtomicBoolean
import org.apache.thrift.TException
import scala.collection.Set
/**
 * Able to respond to user queries regarding the traces. Usually does so
 * by looking up the information in the index and then fetching the required
 * trace data from the storage.
*/
class QueryService(storage: Storage, index: Index, aggregates: Aggregates, adjusterMap: Map[gen.Adjust, Adjuster],
statsReceiver: StatsReceiver = NullStatsReceiver) extends gen.ZipkinQuery.FutureIface with Service {
private val log = Logger.get
private val running = new AtomicBoolean(false)
private val stats = statsReceiver.scope("QueryService")
private val methodStats = stats.scope("methods")
private val errorStats = stats.scope("errors")
private val timingStats = stats.scope("timing")
// how to sort the trace summaries
private val OrderByDurationDesc = {
(a: TraceIdDuration, b: TraceIdDuration) => a.duration > b.duration
}
private val OrderByDurationAsc = {
(a: TraceIdDuration, b: TraceIdDuration) => a.duration < b.duration
}
private val OrderByTimestampDesc = {
(a: TraceIdDuration, b: TraceIdDuration) => a.startTimestamp > b.startTimestamp
}
private val OrderByTimestampAsc = {
(a: TraceIdDuration, b: TraceIdDuration) => a.startTimestamp < b.startTimestamp
}
// this is how many trace durations we fetch in one request
// TODO config
var traceDurationFetchBatchSize = 500
def start() {
running.set(true)
}
def shutdown() {
running.set(false)
storage.close
index.close
aggregates.close
}
private def constructQueryResponse(indexedIds: Seq[IndexedTraceId], limit: Int, order: gen.Order, defaultEndTs: Long = -1): Future[gen.QueryResponse] = {
val ids = indexedIds.map { _.traceId }
val ts = indexedIds.map { _.timestamp }
sortTraceIds(Future(ids), limit, order).map { sortedIds =>
val (min, max) = sortedIds match {
case Nil => (-1L, defaultEndTs)
case _ => (ts.min, ts.max)
}
gen.QueryResponse(sortedIds, min, max)
}
}
def getTraceIds(queryRequest: gen.QueryRequest): Future[gen.QueryResponse] = {
val method = "getTraceIds"
log.debug("%s: %s".format(method, queryRequest.toString))
call(method) {
val serviceName = queryRequest.`serviceName`
val spanName = queryRequest.`spanName`
val endTs = queryRequest.`endTs`
val limit = queryRequest.`limit`
val order = queryRequest.`order`
val sliceQueries = Seq(
spanName.map { name =>
Seq(SpanSliceQuery(serviceName, name, endTs, 1))
},
queryRequest.`annotations`.map {
_.map { a =>
AnnotationSliceQuery(serviceName, a, None, endTs, 1)
}
},
queryRequest.`binaryAnnotations`.map {
_.map { b =>
AnnotationSliceQuery(serviceName, b.`key`, Some(b.`value`), endTs, 1)
}
}
).collect {
case Some(q: Seq[SliceQuery]) => q
}.flatten
log.debug(sliceQueries.toString())
sliceQueries match {
case Nil => {
/* No queries: get service level traces */
index.getTraceIdsByName(serviceName, None, endTs, limit).map {
constructQueryResponse(_, limit, order)
}.flatten
}
case head :: Nil => {
/* One query: just run it */
(head match {
case s: SpanSliceQuery => s.copy(limit = limit)
case a: AnnotationSliceQuery => a.copy(limit = limit)
}).execute(index).map {
constructQueryResponse(_, limit, order)
}.flatten
}
case queries => {
          /* Multiple: fetch a single column from each to reconcile the non-overlapping portions,
             then fetch the entire slice */
Future.collect {
queries.map {
_.execute(index)
}
}.map {
_.flatten.map {
_.timestamp
}.min
}.map { alignedTimestamp =>
/* Pad the aligned timestamp by a minute */
val ts = padTimestamp(alignedTimestamp)
Future.collect {
queries.map {
case s: SpanSliceQuery => s.copy(endTs = ts, limit = limit).execute(index)
case a: AnnotationSliceQuery => a.copy(endTs = ts, limit = limit).execute(index)
}
}.map { ids =>
traceIdsIntersect(ids) match {
case Nil => {
val endTimestamp = ids.map {
_.map { _.timestamp }.min
}.max
constructQueryResponse(Nil, limit, order, endTimestamp)
}
case seq => {
constructQueryResponse(seq, limit, order)
}
}
}
}.flatten.flatten
}
}
}
}
private[query] def padTimestamp(timestamp: Long): Long = timestamp + Constants.TraceTimestampPadding.inMicroseconds
private[query] def traceIdsIntersect(idSeqs: Seq[Seq[IndexedTraceId]]): Seq[IndexedTraceId] = {
/* Find the trace IDs present in all the Seqs */
val idMaps = idSeqs.map {
_.groupBy {
_.traceId
}
}
val traceIds = idMaps.map {
_.keys.toSeq
}
val commonTraceIds = traceIds.tail.fold(traceIds(0)) { _.intersect(_) }
/*
* Find the timestamps associated with each trace ID and construct a new IndexedTraceId
* that has the trace ID's maximum timestamp (ending) as the timestamp
*/
commonTraceIds.map { id =>
val maxTime = idMaps.map { m =>
m(id).map { _.timestamp }
}.flatten.max
IndexedTraceId(id, maxTime)
}
}
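
  /* A worked illustration (not part of the original service) of how
   * `traceIdsIntersect` behaves; the trace IDs and timestamps are made up:
   *
   *   val a = Seq(IndexedTraceId(1L, 100L), IndexedTraceId(2L, 90L))
   *   val b = Seq(IndexedTraceId(1L, 120L), IndexedTraceId(3L, 80L))
   *   traceIdsIntersect(Seq(a, b))  // => Seq(IndexedTraceId(1L, 120L))
   *
   * Only trace ID 1 is present in both Seqs, and it keeps its maximum
   * timestamp across the inputs.
   */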
def getTraceIdsBySpanName(serviceName: String, spanName: String, endTs: Long,
limit: Int, order: gen.Order): Future[Seq[Long]] = {
val method = "getTraceIdsBySpanName"
log.debug("%s. serviceName: %s spanName: %s endTs: %s limit: %s order: %s".format(method, serviceName, spanName,
endTs, limit, order))
call(method) {
if (serviceName == null || "".equals(serviceName)) {
errorStats.counter("%s_no_service".format(method)).incr()
return Future.exception(gen.QueryException("No service name provided"))
}
// do we have a valid span name to query indexes by?
val span = convertToOption(spanName)
FTrace.recordBinary("serviceName", serviceName)
FTrace.recordBinary("spanName", spanName)
FTrace.recordBinary("endTs", endTs)
FTrace.recordBinary("limit", limit)
FTrace.recordBinary("order", order)
val traceIds = index.getTraceIdsByName(serviceName, span, endTs, limit).map {
_.map { _.traceId }
}
sortTraceIds(traceIds, limit, order)
}
}
def getTraceIdsByServiceName(serviceName: String, endTs: Long,
limit: Int, order: gen.Order): Future[Seq[Long]] = {
val method = "getTraceIdsByServiceName"
log.debug("%s. serviceName: %s endTs: %s limit: %s order: %s".format(method, serviceName, endTs, limit, order))
call(method) {
if (serviceName == null || "".equals(serviceName)) {
errorStats.counter("%s_no_service".format(method)).incr()
return Future.exception(gen.QueryException("No service name provided"))
}
FTrace.recordBinary("serviceName", serviceName)
FTrace.recordBinary("endTs", endTs)
FTrace.recordBinary("limit", limit)
FTrace.recordBinary("order", order)
val traceIds = index.getTraceIdsByName(serviceName, None, endTs, limit).map {
_.map { _.traceId }
}
sortTraceIds(traceIds, limit, order)
}
}
def getTraceIdsByAnnotation(serviceName: String, annotation: String, value: ByteBuffer, endTs: Long,
limit: Int, order: gen.Order): Future[Seq[Long]] = {
val method = "getTraceIdsByAnnotation"
log.debug("%s. serviceName: %s annotation: %s value: %s endTs: %s limit: %s order: %s".format(method, serviceName,
annotation, value, endTs, limit, order))
call(method) {
if (annotation == null || "".equals(annotation)) {
errorStats.counter("%s_no_annotation".format(method)).incr()
return Future.exception(gen.QueryException("No annotation provided"))
}
// do we have a valid annotation value to query indexes by?
val valueOption = convertToOption(value)
FTrace.recordBinary("serviceName", serviceName)
FTrace.recordBinary("annotation", annotation)
FTrace.recordBinary("endTs", endTs)
FTrace.recordBinary("limit", limit)
FTrace.recordBinary("order", order)
val traceIds = index.getTraceIdsByAnnotation(serviceName, annotation, valueOption, endTs, limit).map {
_.map { _.traceId }
}
sortTraceIds(traceIds, limit, order)
}
}
def tracesExist(traceIds: Seq[Long]): Future[Set[Long]] = {
log.debug("tracesExist. " + traceIds)
call("tracesExist") {
FTrace.recordBinary("numIds", traceIds.length)
storage.tracesExist(traceIds)
}
}
def getTracesByIds(traceIds: Seq[Long], adjust: Seq[gen.Adjust]): Future[Seq[gen.Trace]] = {
log.debug("getTracesByIds. " + traceIds + " adjust " + adjust)
call("getTracesByIds") {
val adjusters = getAdjusters(adjust)
FTrace.recordBinary("numIds", traceIds.length)
storage.getSpansByTraceIds(traceIds).map { traces =>
traces.map { spans =>
val trace = Trace(spans)
adjusters.foldLeft(trace)((t, adjuster) => adjuster.adjust(t)).toThrift
}
}
}
}
def getTraceTimelinesByIds(traceIds: Seq[Long],
adjust: Seq[gen.Adjust]): Future[Seq[gen.TraceTimeline]] = {
log.debug("getTraceTimelinesByIds. " + traceIds + " adjust " + adjust)
call("getTraceTimelinesByIds") {
val adjusters = getAdjusters(adjust)
FTrace.recordBinary("numIds", traceIds.length)
storage.getSpansByTraceIds(traceIds).map { traces =>
traces.flatMap { spans =>
val trace = Trace(spans)
TraceTimeline(adjusters.foldLeft(trace)((t, adjuster) => adjuster.adjust(t))).map(_.toThrift)
}
}
}
}
def getTraceSummariesByIds(traceIds: Seq[Long],
adjust: Seq[gen.Adjust]): Future[Seq[gen.TraceSummary]] = {
log.debug("getTraceSummariesByIds. traceIds: " + traceIds + " adjust " + adjust)
call("getTraceSummariesByIds") {
val adjusters = getAdjusters(adjust)
FTrace.recordBinary("numIds", traceIds.length)
storage.getSpansByTraceIds(traceIds.toList).map { traces =>
traces.flatMap { spans =>
val trace = Trace(spans)
TraceSummary(adjusters.foldLeft(trace)((t, adjuster) => adjuster.adjust(t))).map(_.toThrift)
}
}
}
}
def getTraceCombosByIds(traceIds: Seq[Long], adjust: Seq[gen.Adjust]): Future[Seq[gen.TraceCombo]] = {
log.debug("getTraceComboByIds. traceIds: " + traceIds + " adjust " + adjust)
call("getTraceComboByIds") {
val adjusters = getAdjusters(adjust)
FTrace.recordBinary("numIds", traceIds.length)
storage.getSpansByTraceIds(traceIds).map { traces =>
traces.map { spans =>
val trace = Trace(spans)
TraceCombo(adjusters.foldLeft(trace)((t, adjuster) => adjuster.adjust(t))).toThrift
}
}
}
}
def getDataTimeToLive: Future[Int] = {
log.debug("getDataTimeToLive")
call("getDataTimeToLive") {
Future(storage.getDataTimeToLive)
}
}
def getServiceNames: Future[Set[String]] = {
log.debug("getServiceNames")
call("getServiceNames") {
index.getServiceNames
}
}
def getSpanNames(service: String): Future[Set[String]] = {
log.debug("getSpanNames")
call("getSpanNames") {
index.getSpanNames(service)
}
}
def setTraceTimeToLive(traceId: Long, ttlSeconds: Int): Future[Unit] = {
log.debug("setTimeToLive: " + traceId + " " + ttlSeconds)
call("setTraceTimeToLive") {
storage.setTimeToLive(traceId, ttlSeconds.seconds)
}
}
def getTraceTimeToLive(traceId: Long): Future[Int] = {
log.debug("getTimeToLive: " + traceId)
call("getTraceTimeToLive") {
storage.getTimeToLive(traceId).map(_.inSeconds)
}
}
def getDependencies(serviceName: String): Future[Seq[String]] = {
log.debug("getDependencies: " + serviceName)
call("getDependencies") {
aggregates.getDependencies(serviceName)
}
}
def getTopAnnotations(serviceName: String): Future[Seq[String]] = {
log.debug("getTopAnnotations: " + serviceName)
call("getTopAnnotations") {
aggregates.getTopAnnotations(serviceName)
}
}
def getTopKeyValueAnnotations(serviceName: String): Future[Seq[String]] = {
log.debug("getTopKeyValueAnnotations: " + serviceName)
call("getTopKeyValueAnnotations") {
aggregates.getTopKeyValueAnnotations(serviceName)
}
}
private def checkIfRunning() = {
if (!running.get) {
log.warning("Server not running, throwing exception")
throw new TException("Server not running")
}
}
private[this] def call[T](name: String)(f: => Future[T]): Future[T] = {
checkIfRunning()
methodStats.counter(name).incr()
timingStats.timeFuture(name) {
f rescue {
case e: Exception => {
log.error(e, "%s failed".format(name))
errorStats.counter(name).incr()
Future.exception(gen.QueryException(e.toString))
}
}
}
}
/**
   * Convert the incoming Thrift order-by enum into a sort function.
*/
private def getOrderBy(order: gen.Order) = {
order match {
case gen.Order.None => OrderByDurationDesc
case gen.Order.DurationDesc => OrderByDurationDesc
case gen.Order.DurationAsc => OrderByDurationAsc
case gen.Order.TimestampDesc => OrderByTimestampDesc
case gen.Order.TimestampAsc => OrderByTimestampAsc
}
}
private def getAdjusters(adjusters: Seq[gen.Adjust]): Seq[Adjuster] = {
adjusters.flatMap { adjusterMap.get(_) }
}
/**
* Do we have a valid object to query indexes by?
*/
private def convertToOption[O](param: O): Option[O] = {
param match {
case null => None
case "" => None
case s => Some(s)
}
}
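
  /* A small illustration (not in the original source) of `convertToOption`:
   *
   *   convertToOption(null)   // => None
   *   convertToOption("")     // => None
   *   convertToOption("web")  // => Some("web")
   */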
/**
   * Given a sequence of trace IDs, get their durations.
*/
private def getTraceIdDurations(
traceIds: Future[Seq[Long]]
): Future[Seq[TraceIdDuration]] = {
traceIds.map { t =>
Future.collect {
t.grouped(traceDurationFetchBatchSize)
.toSeq
.map {index.getTracesDuration(_)}
}
}.flatten.map {_.flatten}
}
private def sortTraceIds(
traceIds: Future[Seq[Long]],
limit: Int,
order: gen.Order
): Future[Seq[Long]] = {
// No sorting wanted
if (order == gen.Order.None) {
traceIds
} else {
val durations = getTraceIdDurations(traceIds)
durations map { d =>
d.sortWith(getOrderBy(order)).slice(0, limit).map(_.traceId)
}
}
}
}
| kevinyang0906/zipkin | zipkin-query-core/src/main/scala/com/twitter/zipkin/query/QueryService.scala | Scala | apache-2.0 | 16,233 |
package truerss.db.driver
import java.time.{Clock, LocalDateTime}
import com.zaxxer.hikari.{HikariConfig, HikariDataSource}
import slick.ast.FieldSymbol
import slick.dbio.DBIO
import slick.jdbc.{JdbcBackend, JdbcProfile}
import slick.jdbc.meta.{MColumn, MQName, MTable}
import slick.migration.api.{Dialect, GenericDialect, ReversibleMigrationSeq, TableMigration}
import truerss.db.{DbLayer, PluginSource, Predefined, Version}
import truerss.util.DbConfig
import scala.concurrent.Await
import scala.concurrent.duration._
object DbInitializer {
private val waitTime = 10 seconds
private val initFailTimeout = 1000
def initialize(dbConf: DbConfig, isUserConf: Boolean): DbLayer = {
val backend: Option[SupportedDb] = DBProfile.get(dbConf.dbBackend)
if (backend.isEmpty) {
Console.err.println(s"Unsupported database backend: ${dbConf.dbBackend}")
sys.exit(1)
}
val dbProfile = DBProfile.create(backend.get)
val props = dbProfile.props(dbConf, isUserConf)
val hc = new HikariConfig(props)
hc.setConnectionTestQuery("SELECT 1;")
hc.setPoolName("TrueRssPool")
hc.setInitializationFailTimeout(initFailTimeout)
hc.setMaximumPoolSize(dbProfile.defaultConnectionSize)
val db = try {
val ds = new HikariDataSource(hc)
JdbcBackend.Database.forDataSource(ds, None)
} catch {
case ex: Exception =>
Console.err.println(s"Database Initialization error. Check parameters for the db: $ex")
sys.exit(1)
}
val names = TableNames.default
val driver = CurrentDriver(dbProfile.profile, names)
createTables(db, driver)
runMigrations(db, dbProfile, driver)
DbLayer(db, driver)
}
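
  /* A minimal usage sketch (not part of the original file); `dbConf` stands
   * for whatever DbConfig instance the application has loaded:
   *
   *   val dbLayer = DbInitializer.initialize(dbConf, isUserConf = false)
   *   // creates any missing tables, runs pending migrations, returns a DbLayer
   */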
private def createTables(db: JdbcBackend.DatabaseDef, driver: CurrentDriver): Unit = {
import driver.profile.api._
def run[T](description: String, act: DBIOAction[T, NoStream, Nothing]) = {
Console.out.println(s"----> $description")
Await.result(
db.run {
act
},
waitTime
)
}
val names = driver.tableNames
val tables = Await.result(db.run(MTable.getTables), waitTime)
val tableNames = tables.toList.map(_.name).map(_.name)
if (!tableNames.contains(names.sources)) {
run("create db", (driver.query.sources.schema ++ driver.query.feeds.schema).create)
}
if (!tableNames.contains(names.predefinedSettings)) {
run("add predefined settings tables", driver.query.predefinedSettings.schema.create)
}
if (!tableNames.contains(names.userSettings)) {
run("add user settings tables", driver.query.userSettings.schema.create)
}
if (!tableNames.contains(names.versions)) {
// no versions
run("create versions table", driver.query.versions.schema.create)
}
if (!tableNames.contains(names.pluginSources)) {
// no plugin sources
run("create plugin_sources table", driver.query.pluginSources.schema.create)
}
if (!tableNames.contains(names.sourceStatuses)) {
run(s"create ${names.sourceStatuses} table", driver.query.sourceStatuses.schema.create)
}
}
private def runMigrations(db: JdbcBackend.DatabaseDef, dbProfile: DBProfile, driver: CurrentDriver): Unit = {
import driver.profile.api._
val versions = Await.result(db.run(driver.query.versions.result), waitTime).toVector
val currentSourceIndexes = driver.query.sources.baseTableRow.indexes.map(_.name)
val v1 = Migration.addIndexes(dbProfile, driver, currentSourceIndexes)
val v2 = Migration.addEnclosure(db, dbProfile, driver)
runPredefinedChanges(db, driver)
addDefaultSource(db, driver)
val all = Vector(
v1,
v2
)
val allVersions = versions.map(_.id)
val need = all.filterNot { x => allVersions.contains(x.version) }
Console.out.println(s"detect: ${versions.size} migrations, need to run: ${need.size}")
need.foreach { m =>
m.changes match {
case Some(changes) =>
Console.out.println(s"run: ${m.version} -> ${m.description}")
Await.result(db.run(changes()), waitTime)
case None =>
Console.out.println(s"skip: ${m.version}: ${m.description}")
}
val version = Version(m.version, m.description, LocalDateTime.now(Clock.systemUTC()))
val f = db.run {
(driver.query.versions returning driver.query.versions.map(_.id)) += version
}
Await.result(f, waitTime)
}
Console.out.println("completed...")
}
// todo remove
def addDefaultSource(db: JdbcBackend.DatabaseDef, driver: CurrentDriver): Unit = {
import driver.profile.api._
val url = "https://github.com/truerss/plugins/releases/tag/1.0.0"
val length = Await.result(
db.run {
driver.query.pluginSources.filter(_.url === url).length.result
}, waitTime
)
if (length == 0) {
Console.println(s"Write: default plugin source")
Await.ready(
db.run {
driver.query.pluginSources ++= PluginSource(id = None, url = url) :: Nil
}, waitTime
)
}
}
def runPredefinedChanges(db: JdbcBackend.DatabaseDef, driver: CurrentDriver): Unit = {
import driver.profile.api._
Predefined.predefined.foreach { p =>
val q = Await.result(
db.run {
driver.query.predefinedSettings.filter(_.key === p.key).result
}, waitTime
)
if (q.isEmpty) {
Console.println(s"Write: $p predefined settings")
Await.ready(
db.run {
driver.query.predefinedSettings ++= p :: Nil
}, waitTime
)
}
}
}
case class Migration(version: Long, description: String, changes: Option[ReversibleMigrationSeq])
object Migration {
def addIndexes(dbProfile: DBProfile, driver: CurrentDriver, currentIndexes: Iterable[String]): Migration = {
implicit val dialect = GenericDialect.apply(dbProfile.profile)
val changes = if (currentIndexes.isEmpty) {
val sq = TableMigration(driver.query.sources)
.addIndexes(_.byUrlIndex)
.addIndexes(_.byNameIndex)
val fq = TableMigration(driver.query.feeds)
.addIndexes(_.bySourceIndex)
.addIndexes(_.byFavoriteIndex)
.addIndexes(_.byReadIndex)
.addIndexes(_.bySourceAndFavorite)
.addIndexes(_.bySourceAndReadIndex)
Some(sq & fq)
} else {
None
}
Migration(1L, "add indexes", changes)
}
def addEnclosure(
db: JdbcBackend.DatabaseDef,
dbProfile: DBProfile,
driver: CurrentDriver
): Migration = {
val feedsColumnsQuery = MColumn.getColumns(
MQName.local(driver.query.feeds.baseTableRow.tableName),
"enclosure"
)
val columns = Await.result(db.run(feedsColumnsQuery), waitTime)
implicit val dialect: Dialect[_ <: JdbcProfile] = GenericDialect(dbProfile.profile)
val changes = Option.when(columns.isEmpty)(new ReversibleMigrationSeq(
TableMigration(driver.query.feeds).addColumns(_.enclosure)
))
Migration(2L, "add enclosure", changes)
}
}
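
  /* A hedged sketch of how a further migration could be declared, following
   * the pattern of `addIndexes` and `addEnclosure` above; the version number
   * and the `someNewColumn` column are hypothetical:
   *
   *   def addSomeColumn(dbProfile: DBProfile, driver: CurrentDriver): Migration = {
   *     implicit val dialect = GenericDialect(dbProfile.profile)
   *     val changes = Some(new ReversibleMigrationSeq(
   *       TableMigration(driver.query.sources).addColumns(_.someNewColumn)))
   *     Migration(3L, "add someNewColumn", changes)
   *   }
   *
   * Returning `changes = None` records the version as applied without running
   * anything, as `runMigrations` above shows.
   */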
}
| truerss/truerss | src/main/scala/truerss/db/driver/DbInitializer.scala | Scala | mit | 7,110 |
/*
* Copyright (c) 2014 Paul Bernard
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Spectrum Finance is based in part on:
* QuantLib. http://quantlib.org/
*
*/
package org.quantintel.ql.instruments.bonds
/**
* @author Paul Bernard
*/
class CpiBond {
}
| pmularien/spectrum-old | financial/src/main/scala/org/quantintel/ql/instruments/bonds/CpiBond.scala | Scala | apache-2.0 | 782 |
/**
* Copyright (c) 2016 Intel Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.trustedanalytics.sparktk.graph.internal.ops.orientdb
import org.trustedanalytics.sparktk.graph.internal.constructors.fromorientdb.SchemaReader
import scala.collection.immutable.Map
import scala.collection.mutable
/**
 * Collects summary statistics for graphs exported to OrientDB
 *
 * @param orientConf OrientDB configurations
 * @param dbName OrientDB database name
*/
class Statistics(orientConf: OrientdbConf, dbName: String) {
val orientGraphInstance = OrientdbGraphFactory.graphDbConnector(orientConf, dbName)
/**
* collect Statistics
*
* @param verticesCount count of vertices required to be exported to OrientDB database
* @param edgesCount count of edges required to be exported to OrientDB database
* @return exported vertices and edges summary statistics
*/
def getStats(verticesCount: Long, edgesCount: Long): ExportOrientdbStats = {
val dbUri = OrientdbGraphFactory.getUrl(orientConf, dbName)
val verticesSummary = getExportedVerticesSummary(verticesCount)
val verticesTypesStats = getExportVertexClassStats
val edgesSummary = getExportedEdgesSummary(edgesCount)
val edgesTypesStats = getExportEdgeClassStats
orientGraphInstance.shutdown()
new ExportOrientdbStats(verticesSummary, verticesTypesStats, edgesSummary, edgesTypesStats, dbUri)
}
/**
   * Get vertex type statistics
   *
   * @return dictionary of the exported vertex types and their counts
*/
def getExportVertexClassStats: Map[String, Long] = {
val exportedVertices = mutable.Map[String, Long]()
val schemaReader = new SchemaReader(orientGraphInstance)
val vertexTypes = schemaReader.getVertexClasses.getOrElse(Set(orientGraphInstance.getVertexBaseType.getName))
vertexTypes.foreach(vertexType => {
exportedVertices.put(vertexType, orientGraphInstance.countVertices(vertexType))
})
exportedVertices.toMap
}
/**
   * Get edge type statistics
   *
   * @return dictionary of the exported edge types and their counts
*/
def getExportEdgeClassStats: Map[String, Long] = {
val exportedEdges = mutable.Map[String, Long]()
val schemaReader = new SchemaReader(orientGraphInstance)
val edgeTypes = schemaReader.getEdgeClasses.getOrElse(Set(orientGraphInstance.getEdgeBaseType.getName))
edgeTypes.foreach(edgeType => {
      exportedEdges.put(edgeType, orientGraphInstance.countEdges(edgeType))
})
exportedEdges.toMap
}
/**
* Get the exported vertices summary statistics
*
* @param verticesCount count of vertices required to be exported to OrientDB database
   * @return dictionary with the exported vertices' success and failure counts
*/
def getExportedVerticesSummary(verticesCount: Long): Map[String, Long] = {
val successCountLabel = "Total Exported Vertices Count"
val failureCountLabel = "Failure Count"
val stats = mutable.Map[String, Long]()
stats.put(successCountLabel, orientGraphInstance.countVertices())
stats.put(failureCountLabel, verticesCount - orientGraphInstance.countVertices())
stats.toMap
}
/**
* Get the exported edges summary statistics
*
* @param edgesCount count of edges required to be exported to OrientDB database
* @return dictionary for the exported edges success and failure count
*/
def getExportedEdgesSummary(edgesCount: Long): Map[String, Long] = {
val successCountLabel = "Total Exported Edges Count"
val failureCountLabel = "Failure Count"
val stats = mutable.Map[String, Long]()
stats.put(successCountLabel, orientGraphInstance.countEdges())
stats.put(failureCountLabel, edgesCount - orientGraphInstance.countEdges())
stats.toMap
}
}
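
/* A usage sketch (not in the original file); `conf`, the database name and the
 * counts are hypothetical values:
 *
 *   val stats = new Statistics(conf, "mygraphdb")
 *   val summary = stats.getStats(verticesCount = 100L, edgesCount = 250L)
 *   // summary bundles per-class counts plus success/failure totals
 */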
| trustedanalytics/spark-tk | sparktk-core/src/main/scala/org/trustedanalytics/sparktk/graph/internal/ops/orientdb/Statistics.scala | Scala | apache-2.0 | 4,410 |
package pl.edu.agh.mplt.visitors.translator.latex
import pl.edu.agh.mplt.parser.declaration.data.DataDeclaration
import pl.edu.agh.mplt.visitors.translator.Translator
import pl.edu.agh.mplt.parser.phrase.set.{IndexedSet, Indexing}
class DataTranslator extends Translator[DataDeclaration] {
override def apply(node: DataDeclaration): String = {
val name = node.name
val indexedName = node.indexing.map(zipWithIndexes(name, _)) getOrElse name
val indexing: String = node.indexing.map((new IndexingTranslator)(_)) getOrElse ""
val attrs = joinWith(",")(node.attributes.map((new AttributeTranslator(indexedName))(_)))
s"$name: $indexing $attrs"
}
private def zipWithIndexes(name: String, indexing: Indexing): String = {
    val indices = joinWith(",")(indexing.sexprs.flatMap { case IndexedSet(is, _) => is })
    if (indices != "" && indices != " ") s"${name}_{$indices}"
else name
}
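
  /* An illustration (not in the original source) of `zipWithIndexes`: for a
   * declaration indexed by `{i in I, j in J}` the indexed sets contribute the
   * indices "i" and "j", so
   *
   *   zipWithIndexes("x", indexing)  // => "x_{i,j}"
   *
   * whereas an indexing that contributes no indices leaves the name unchanged.
   */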
}
| marek1840/MPLT | src/main/scala/pl/edu/agh/mplt/visitors/translator/latex/DataTranslator.scala | Scala | mit | 968 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.runtime.aggregate
import java.util
import org.apache.calcite.rel.`type`._
import org.apache.calcite.rel.core.AggregateCall
import org.apache.calcite.sql.`type`.SqlTypeName
import org.apache.calcite.sql.`type`.SqlTypeName._
import org.apache.calcite.sql.fun._
import org.apache.calcite.sql.{SqlAggFunction, SqlKind}
import org.apache.flink.api.common.functions.{MapFunction, RichGroupReduceFunction, AggregateFunction => DataStreamAggFunction, _}
import org.apache.flink.api.common.typeinfo.{BasicTypeInfo, TypeInformation}
import org.apache.flink.api.java.tuple.Tuple
import org.apache.flink.api.java.typeutils.RowTypeInfo
import org.apache.flink.streaming.api.functions.ProcessFunction
import org.apache.flink.streaming.api.functions.windowing.{AllWindowFunction, WindowFunction}
import org.apache.flink.streaming.api.windowing.windows.{Window => DataStreamWindow}
import org.apache.flink.table.api.dataview.DataViewSpec
import org.apache.flink.table.api.{StreamQueryConfig, TableException}
import org.apache.flink.table.calcite.FlinkRelBuilder.NamedWindowProperty
import org.apache.flink.table.calcite.FlinkTypeFactory
import org.apache.flink.table.codegen.AggregationCodeGenerator
import org.apache.flink.table.expressions.ExpressionUtils.isTimeIntervalLiteral
import org.apache.flink.table.expressions._
import org.apache.flink.table.functions.aggfunctions._
import org.apache.flink.table.functions.utils.AggSqlFunction
import org.apache.flink.table.functions.utils.UserDefinedFunctionUtils._
import org.apache.flink.table.functions.{AggregateFunction => TableAggregateFunction}
import org.apache.flink.table.plan.logical._
import org.apache.flink.table.runtime.types.{CRow, CRowTypeInfo}
import org.apache.flink.table.typeutils.TypeCheckUtils._
import org.apache.flink.table.typeutils.{RowIntervalTypeInfo, TimeIntervalTypeInfo}
import org.apache.flink.types.Row
import scala.collection.JavaConversions._
import scala.collection.JavaConverters._
import scala.collection.mutable.ArrayBuffer
object AggregateUtil {
type CalcitePair[T, R] = org.apache.calcite.util.Pair[T, R]
type JavaList[T] = java.util.List[T]
/**
* Create an [[org.apache.flink.streaming.api.functions.ProcessFunction]] for unbounded OVER
* window to evaluate final aggregate value.
*
* @param generator code generator instance
* @param namedAggregates Physical calls to aggregate functions and their output field names
* @param inputType Physical type of the row.
* @param inputTypeInfo Physical type information of the row.
* @param inputFieldTypeInfo Physical type information of the row's fields.
* @param rowTimeIdx The index of the rowtime field or None in case of processing time.
   * @param isPartitioned It is a tag that indicates whether the input is partitioned
   * @param isRowsClause It is a tag that indicates whether the OVER clause is a ROWS clause
*/
private[flink] def createUnboundedOverProcessFunction(
generator: AggregationCodeGenerator,
namedAggregates: Seq[CalcitePair[AggregateCall, String]],
inputType: RelDataType,
inputTypeInfo: TypeInformation[Row],
inputFieldTypeInfo: Seq[TypeInformation[_]],
queryConfig: StreamQueryConfig,
rowTimeIdx: Option[Int],
isPartitioned: Boolean,
isRowsClause: Boolean)
: ProcessFunction[CRow, CRow] = {
val (aggFields, aggregates, accTypes, accSpecs) =
transformToAggregateFunctions(
namedAggregates.map(_.getKey),
inputType,
needRetraction = false,
isStateBackedDataViews = true)
val aggregationStateType: RowTypeInfo = new RowTypeInfo(accTypes: _*)
val forwardMapping = (0 until inputType.getFieldCount).toArray
val aggMapping = aggregates.indices.map(x => x + inputType.getFieldCount).toArray
val outputArity = inputType.getFieldCount + aggregates.length
val genFunction = generator.generateAggregations(
"UnboundedProcessingOverAggregateHelper",
inputFieldTypeInfo,
aggregates,
aggFields,
aggMapping,
partialResults = false,
forwardMapping,
None,
None,
outputArity,
needRetract = false,
needMerge = false,
needReset = false,
accConfig = Some(accSpecs)
)
if (rowTimeIdx.isDefined) {
if (isRowsClause) {
// ROWS unbounded over process function
new RowTimeUnboundedRowsOver(
genFunction,
aggregationStateType,
CRowTypeInfo(inputTypeInfo),
rowTimeIdx.get,
queryConfig)
} else {
// RANGE unbounded over process function
new RowTimeUnboundedRangeOver(
genFunction,
aggregationStateType,
CRowTypeInfo(inputTypeInfo),
rowTimeIdx.get,
queryConfig)
}
} else {
new ProcTimeUnboundedOver(
genFunction,
aggregationStateType,
queryConfig)
}
}
/**
* Create an [[org.apache.flink.streaming.api.functions.ProcessFunction]] for group (without
* window) aggregate to evaluate final aggregate value.
*
* @param generator code generator instance
* @param namedAggregates List of calls to aggregate functions and their output field names
* @param inputRowType Input row type
* @param inputFieldTypes Types of the physical input fields
* @param groupings the position (in the input Row) of the grouping keys
* @param queryConfig The configuration of the query to generate.
    * @param generateRetraction It is a tag that indicates whether to generate retract records.
    * @param consumeRetraction It is a tag that indicates whether to consume retract records.
* @return [[org.apache.flink.streaming.api.functions.ProcessFunction]]
*/
private[flink] def createGroupAggregateFunction(
generator: AggregationCodeGenerator,
namedAggregates: Seq[CalcitePair[AggregateCall, String]],
inputRowType: RelDataType,
inputFieldTypes: Seq[TypeInformation[_]],
groupings: Array[Int],
queryConfig: StreamQueryConfig,
generateRetraction: Boolean,
consumeRetraction: Boolean): ProcessFunction[CRow, CRow] = {
val (aggFields, aggregates, accTypes, accSpecs) =
transformToAggregateFunctions(
namedAggregates.map(_.getKey),
inputRowType,
consumeRetraction,
isStateBackedDataViews = true)
val aggMapping = aggregates.indices.map(_ + groupings.length).toArray
val outputArity = groupings.length + aggregates.length
val aggregationStateType: RowTypeInfo = new RowTypeInfo(accTypes: _*)
val genFunction = generator.generateAggregations(
"NonWindowedAggregationHelper",
inputFieldTypes,
aggregates,
aggFields,
aggMapping,
partialResults = false,
groupings,
None,
None,
outputArity,
consumeRetraction,
needMerge = false,
needReset = false,
accConfig = Some(accSpecs)
)
new GroupAggProcessFunction(
genFunction,
aggregationStateType,
generateRetraction,
queryConfig)
}
/**
* Create an [[org.apache.flink.streaming.api.functions.ProcessFunction]] for ROWS clause
* bounded OVER window to evaluate final aggregate value.
*
* @param generator code generator instance
* @param namedAggregates Physical calls to aggregate functions and their output field names
* @param inputType Physical type of the row.
* @param inputTypeInfo Physical type information of the row.
* @param inputFieldTypeInfo Physical type information of the row's fields.
* @param precedingOffset the preceding offset
   * @param isRowsClause It is a tag that indicates whether the OVER clause is a ROWS clause
* @param rowTimeIdx The index of the rowtime field or None in case of processing time.
* @return [[org.apache.flink.streaming.api.functions.ProcessFunction]]
*/
private[flink] def createBoundedOverProcessFunction(
generator: AggregationCodeGenerator,
namedAggregates: Seq[CalcitePair[AggregateCall, String]],
inputType: RelDataType,
inputTypeInfo: TypeInformation[Row],
inputFieldTypeInfo: Seq[TypeInformation[_]],
precedingOffset: Long,
queryConfig: StreamQueryConfig,
isRowsClause: Boolean,
rowTimeIdx: Option[Int])
: ProcessFunction[CRow, CRow] = {
val needRetract = true
val (aggFields, aggregates, accTypes, accSpecs) =
transformToAggregateFunctions(
namedAggregates.map(_.getKey),
inputType,
needRetract,
isStateBackedDataViews = true)
val aggregationStateType: RowTypeInfo = new RowTypeInfo(accTypes: _*)
val inputRowType = CRowTypeInfo(inputTypeInfo)
val forwardMapping = (0 until inputType.getFieldCount).toArray
val aggMapping = aggregates.indices.map(x => x + inputType.getFieldCount).toArray
val outputArity = inputType.getFieldCount + aggregates.length
val genFunction = generator.generateAggregations(
"BoundedOverAggregateHelper",
inputFieldTypeInfo,
aggregates,
aggFields,
aggMapping,
partialResults = false,
forwardMapping,
None,
None,
outputArity,
needRetract,
needMerge = false,
needReset = false,
accConfig = Some(accSpecs)
)
if (rowTimeIdx.isDefined) {
if (isRowsClause) {
new RowTimeBoundedRowsOver(
genFunction,
aggregationStateType,
inputRowType,
precedingOffset,
rowTimeIdx.get,
queryConfig)
} else {
new RowTimeBoundedRangeOver(
genFunction,
aggregationStateType,
inputRowType,
precedingOffset,
rowTimeIdx.get,
queryConfig)
}
} else {
if (isRowsClause) {
new ProcTimeBoundedRowsOver(
genFunction,
precedingOffset,
aggregationStateType,
inputRowType,
queryConfig)
} else {
new ProcTimeBoundedRangeOver(
genFunction,
precedingOffset,
aggregationStateType,
inputRowType,
queryConfig)
}
}
}
/**
* Create a [[org.apache.flink.api.common.functions.MapFunction]] that prepares for aggregates.
   * The output of the function contains the grouping keys, the timestamp, and the intermediate
   * aggregate values of all aggregate functions. The timestamp field is aligned to the time
   * window start and used as a grouping key in case of a time window. In case of a count window
   * on event-time, the timestamp is not aligned and is used for sorting.
*
* The output is stored in Row by the following format:
* {{{
* avg(x) count(z)
* | |
* v v
* +---------+---------+----------------+----------------+------------------+-------+
* |groupKey1|groupKey2| AvgAccumulator | SumAccumulator | CountAccumulator |rowtime|
* +---------+---------+----------------+----------------+------------------+-------+
* ^ ^
* | |
* sum(y) rowtime to group or sort
* }}}
*
   * NOTE: this function is only used for time-based windows on batch tables.
*/
def createDataSetWindowPrepareMapFunction(
generator: AggregationCodeGenerator,
window: LogicalWindow,
namedAggregates: Seq[CalcitePair[AggregateCall, String]],
groupings: Array[Int],
inputType: RelDataType,
inputFieldTypeInfo: Seq[TypeInformation[_]],
isParserCaseSensitive: Boolean)
: MapFunction[Row, Row] = {
val needRetract = false
val (aggFieldIndexes, aggregates, accTypes, _) = transformToAggregateFunctions(
namedAggregates.map(_.getKey),
inputType,
needRetract)
val mapReturnType: RowTypeInfo =
createRowTypeForKeysAndAggregates(
groupings,
aggregates,
accTypes,
inputType,
Some(Array(BasicTypeInfo.LONG_TYPE_INFO)))
val (timeFieldPos, tumbleTimeWindowSize) = window match {
case TumblingGroupWindow(_, time, size) =>
val timeFieldPos = getTimeFieldPosition(time, inputType, isParserCaseSensitive)
size match {
case Literal(value: Long, TimeIntervalTypeInfo.INTERVAL_MILLIS) =>
(timeFieldPos, Some(value))
case _ => (timeFieldPos, None)
}
case SessionGroupWindow(_, time, _) =>
(getTimeFieldPosition(time, inputType, isParserCaseSensitive), None)
case SlidingGroupWindow(_, time, size, slide) =>
val timeFieldPos = getTimeFieldPosition(time, inputType, isParserCaseSensitive)
size match {
case Literal(value: Long, TimeIntervalTypeInfo.INTERVAL_MILLIS) =>
// pre-tumble incremental aggregates on time-windows
val preTumblingSize = determineLargestTumblingSize(asLong(size), asLong(slide))
(timeFieldPos, Some(preTumblingSize))
case _ => (timeFieldPos, None)
}
case _ =>
throw new UnsupportedOperationException(s"$window is currently not supported on batch")
}
val aggMapping = aggregates.indices.toArray.map(_ + groupings.length)
val outputArity = aggregates.length + groupings.length + 1
val genFunction = generator.generateAggregations(
"DataSetAggregatePrepareMapHelper",
inputFieldTypeInfo,
aggregates,
aggFieldIndexes,
aggMapping,
partialResults = true,
groupings,
None,
None,
outputArity,
needRetract,
needMerge = false,
needReset = true,
None
)
new DataSetWindowAggMapFunction(
genFunction,
timeFieldPos,
tumbleTimeWindowSize,
mapReturnType)
}
/**
* Create a [[org.apache.flink.api.common.functions.GroupReduceFunction]] that prepares for
* partial aggregates of sliding windows (time and count-windows).
* It requires a prepared input (with intermediate aggregate fields and aligned rowtime for
* pre-tumbling in case of time-windows), pre-aggregates (pre-tumbles) rows, aligns the
* window-start, and replicates or omits records for different panes of a sliding window.
*
* The output of the function contains the grouping keys, the intermediate aggregate values of
   * all aggregate functions, and the aligned window start. The window start need not be a
   * timestamp, but can also be a count value for count-windows.
*
* The output is stored in Row by the following format:
*
* {{{
* avg(x) aggOffsetInRow = 2 count(z) aggOffsetInRow = 5
* | |
* v v
* +---------+---------+--------+--------+--------+--------+-------------+
* |groupKey1|groupKey2| sum1 | count1 | sum2 | count2 | windowStart |
* +---------+---------+--------+--------+--------+--------+-------------+
* ^ ^
* | |
* sum(y) aggOffsetInRow = 4 window start for pane mapping
* }}}
*
* NOTE: this function is only used for sliding windows with partial aggregates on batch tables.
*/
def createDataSetSlideWindowPrepareGroupReduceFunction(
generator: AggregationCodeGenerator,
window: LogicalWindow,
namedAggregates: Seq[CalcitePair[AggregateCall, String]],
groupings: Array[Int],
physicalInputRowType: RelDataType,
physicalInputTypes: Seq[TypeInformation[_]],
isParserCaseSensitive: Boolean)
: RichGroupReduceFunction[Row, Row] = {
val needRetract = false
val (aggFieldIndexes, aggregates, accTypes, _) = transformToAggregateFunctions(
namedAggregates.map(_.getKey),
physicalInputRowType,
needRetract)
val returnType: RowTypeInfo = createRowTypeForKeysAndAggregates(
groupings,
aggregates,
accTypes,
physicalInputRowType,
Some(Array(BasicTypeInfo.LONG_TYPE_INFO)))
val keysAndAggregatesArity = groupings.length + namedAggregates.length
window match {
case SlidingGroupWindow(_, _, size, slide) if isTimeInterval(size.resultType) =>
// sliding time-window for partial aggregations
val genFunction = generator.generateAggregations(
"DataSetAggregatePrepareMapHelper",
physicalInputTypes,
aggregates,
aggFieldIndexes,
aggregates.indices.map(_ + groupings.length).toArray,
partialResults = true,
groupings.indices.toArray,
Some(aggregates.indices.map(_ + groupings.length).toArray),
None,
keysAndAggregatesArity + 1,
needRetract,
needMerge = true,
needReset = true,
None
)
new DataSetSlideTimeWindowAggReduceGroupFunction(
genFunction,
keysAndAggregatesArity,
asLong(size),
asLong(slide),
returnType)
case _ =>
throw new UnsupportedOperationException(s"$window is currently not supported on batch.")
}
}
/**
* Create a [[org.apache.flink.api.common.functions.FlatMapFunction]] that prepares for
* non-incremental aggregates of sliding windows (time-windows).
*
* It requires a prepared input (with intermediate aggregate fields), aligns the
* window-start, and replicates or omits records for different panes of a sliding window.
*
* The output of the function contains the grouping keys, the intermediate aggregate values of
   * all aggregate functions, and the aligned window start.
*
* The output is stored in Row by the following format:
*
* {{{
* avg(x) aggOffsetInRow = 2 count(z) aggOffsetInRow = 5
* | |
* v v
* +---------+---------+--------+--------+--------+--------+-------------+
* |groupKey1|groupKey2| sum1 | count1 | sum2 | count2 | windowStart |
* +---------+---------+--------+--------+--------+--------+-------------+
* ^ ^
* | |
* sum(y) aggOffsetInRow = 4 window start for pane mapping
* }}}
*
* NOTE: this function is only used for time-based sliding windows on batch tables.
*/
def createDataSetSlideWindowPrepareFlatMapFunction(
window: LogicalWindow,
namedAggregates: Seq[CalcitePair[AggregateCall, String]],
groupings: Array[Int],
inputType: TypeInformation[Row],
isParserCaseSensitive: Boolean)
: FlatMapFunction[Row, Row] = {
window match {
case SlidingGroupWindow(_, _, size, slide) if isTimeInterval(size.resultType) =>
new DataSetSlideTimeWindowAggFlatMapFunction(
inputType.getArity - 1,
asLong(size),
asLong(slide),
inputType)
case _ =>
throw new UnsupportedOperationException(
s"$window is currently not supported in a batch environment.")
}
}
/**
* Create a [[org.apache.flink.api.common.functions.GroupReduceFunction]] to compute window
   * aggregates on batch tables. If all aggregates support partial aggregation and it is a time
* window, the [[org.apache.flink.api.common.functions.GroupReduceFunction]] implements
* [[org.apache.flink.api.common.functions.CombineFunction]] as well.
*
* NOTE: this function is only used for window on batch tables.
*/
def createDataSetWindowAggregationGroupReduceFunction(
generator: AggregationCodeGenerator,
window: LogicalWindow,
namedAggregates: Seq[CalcitePair[AggregateCall, String]],
physicalInputRowType: RelDataType,
physicalInputTypes: Seq[TypeInformation[_]],
outputType: RelDataType,
groupings: Array[Int],
properties: Seq[NamedWindowProperty],
isInputCombined: Boolean = false)
: RichGroupReduceFunction[Row, Row] = {
val needRetract = false
val (aggFieldIndexes, aggregates, _, _) = transformToAggregateFunctions(
namedAggregates.map(_.getKey),
physicalInputRowType,
needRetract)
val aggMapping = aggregates.indices.toArray.map(_ + groupings.length)
val genPreAggFunction = generator.generateAggregations(
"GroupingWindowAggregateHelper",
physicalInputTypes,
aggregates,
aggFieldIndexes,
aggMapping,
partialResults = true,
groupings,
Some(aggregates.indices.map(_ + groupings.length).toArray),
None,
outputType.getFieldCount,
needRetract,
needMerge = true,
needReset = true,
None
)
val genFinalAggFunction = generator.generateAggregations(
"GroupingWindowAggregateHelper",
physicalInputTypes,
aggregates,
aggFieldIndexes,
aggMapping,
partialResults = false,
groupings.indices.toArray,
Some(aggregates.indices.map(_ + groupings.length).toArray),
None,
outputType.getFieldCount,
needRetract,
needMerge = true,
needReset = true,
None
)
val keysAndAggregatesArity = groupings.length + namedAggregates.length
window match {
case TumblingGroupWindow(_, _, size) if isTimeInterval(size.resultType) =>
// tumbling time window
val (startPos, endPos, _) = computeWindowPropertyPos(properties)
if (doAllSupportPartialMerge(aggregates)) {
// for incremental aggregations
new DataSetTumbleTimeWindowAggReduceCombineFunction(
genPreAggFunction,
genFinalAggFunction,
asLong(size),
startPos,
endPos,
keysAndAggregatesArity)
}
else {
// for non-incremental aggregations
new DataSetTumbleTimeWindowAggReduceGroupFunction(
genFinalAggFunction,
asLong(size),
startPos,
endPos,
outputType.getFieldCount)
}
case TumblingGroupWindow(_, _, size) =>
// tumbling count window
new DataSetTumbleCountWindowAggReduceGroupFunction(
genFinalAggFunction,
asLong(size))
case SessionGroupWindow(_, _, gap) =>
val (startPos, endPos, _) = computeWindowPropertyPos(properties)
new DataSetSessionWindowAggReduceGroupFunction(
genFinalAggFunction,
keysAndAggregatesArity,
startPos,
endPos,
asLong(gap),
isInputCombined)
case SlidingGroupWindow(_, _, size, _) if isTimeInterval(size.resultType) =>
val (startPos, endPos, _) = computeWindowPropertyPos(properties)
if (doAllSupportPartialMerge(aggregates)) {
// for partial aggregations
new DataSetSlideWindowAggReduceCombineFunction(
genPreAggFunction,
genFinalAggFunction,
keysAndAggregatesArity,
startPos,
endPos,
asLong(size))
}
else {
// for non-partial aggregations
new DataSetSlideWindowAggReduceGroupFunction(
genFinalAggFunction,
keysAndAggregatesArity,
startPos,
endPos,
asLong(size))
}
case SlidingGroupWindow(_, _, size, _) =>
new DataSetSlideWindowAggReduceGroupFunction(
genFinalAggFunction,
keysAndAggregatesArity,
None,
None,
asLong(size))
case _ =>
throw new UnsupportedOperationException(s"$window is currently not supported on batch")
}
}
/**
   * Create a [[org.apache.flink.api.common.functions.MapPartitionFunction]] that performs the
   * aggregation.
   * The function returns the aggregate values of all aggregate functions, which are
   * organized in the following format:
*
* {{{
* avg(x) aggOffsetInRow = 2 count(z) aggOffsetInRow = 5
* | | windowEnd(max(rowtime)
* | | |
* v v v
* +--------+--------+--------+--------+-----------+---------+
* | sum1 | count1 | sum2 | count2 |windowStart|windowEnd|
* +--------+--------+--------+--------+-----------+---------+
* ^ ^
* | |
* sum(y) aggOffsetInRow = 4 windowStart(min(rowtime))
*
* }}}
*
*/
def createDataSetWindowAggregationMapPartitionFunction(
generator: AggregationCodeGenerator,
window: LogicalWindow,
namedAggregates: Seq[CalcitePair[AggregateCall, String]],
physicalInputRowType: RelDataType,
physicalInputTypes: Seq[TypeInformation[_]],
groupings: Array[Int]): MapPartitionFunction[Row, Row] = {
val needRetract = false
val (aggFieldIndexes, aggregates, accTypes, _) = transformToAggregateFunctions(
namedAggregates.map(_.getKey),
physicalInputRowType,
needRetract)
val aggMapping = aggregates.indices.map(_ + groupings.length).toArray
val keysAndAggregatesArity = groupings.length + namedAggregates.length
window match {
case SessionGroupWindow(_, _, gap) =>
val combineReturnType: RowTypeInfo =
createRowTypeForKeysAndAggregates(
groupings,
aggregates,
accTypes,
physicalInputRowType,
Option(Array(BasicTypeInfo.LONG_TYPE_INFO, BasicTypeInfo.LONG_TYPE_INFO)))
val genFunction = generator.generateAggregations(
"GroupingWindowAggregateHelper",
physicalInputTypes,
aggregates,
aggFieldIndexes,
aggMapping,
partialResults = true,
groupings.indices.toArray,
Some(aggregates.indices.map(_ + groupings.length).toArray),
None,
groupings.length + aggregates.length + 2,
needRetract,
needMerge = true,
needReset = true,
None
)
new DataSetSessionWindowAggregatePreProcessor(
genFunction,
keysAndAggregatesArity,
asLong(gap),
combineReturnType)
case _ =>
throw new UnsupportedOperationException(s"$window is currently not supported on batch")
}
}
/**
   * Create a [[org.apache.flink.api.common.functions.GroupCombineFunction]] that performs
   * pre-aggregation.
   * The function returns the intermediate aggregate values of all aggregate functions, which are
   * organized in the following format:
* {{{
* avg(x) windowEnd(max(rowtime)
* | |
* v v
* +---------+---------+----------------+----------------+-------------+-----------+
* |groupKey1|groupKey2| AvgAccumulator | SumAccumulator | windowStart | windowEnd |
* +---------+---------+----------------+----------------+-------------+-----------+
* ^ ^
* | |
* sum(y) windowStart(min(rowtime))
* }}}
*
*/
private[flink] def createDataSetWindowAggregationCombineFunction(
generator: AggregationCodeGenerator,
window: LogicalWindow,
namedAggregates: Seq[CalcitePair[AggregateCall, String]],
physicalInputRowType: RelDataType,
physicalInputTypes: Seq[TypeInformation[_]],
groupings: Array[Int])
: GroupCombineFunction[Row, Row] = {
val needRetract = false
val (aggFieldIndexes, aggregates, accTypes, _) = transformToAggregateFunctions(
namedAggregates.map(_.getKey),
physicalInputRowType,
needRetract)
val aggMapping = aggregates.indices.map(_ + groupings.length).toArray
val keysAndAggregatesArity = groupings.length + namedAggregates.length
window match {
case SessionGroupWindow(_, _, gap) =>
val combineReturnType: RowTypeInfo =
createRowTypeForKeysAndAggregates(
groupings,
aggregates,
accTypes,
physicalInputRowType,
Option(Array(BasicTypeInfo.LONG_TYPE_INFO, BasicTypeInfo.LONG_TYPE_INFO)))
val genFunction = generator.generateAggregations(
"GroupingWindowAggregateHelper",
physicalInputTypes,
aggregates,
aggFieldIndexes,
aggMapping,
partialResults = true,
groupings.indices.toArray,
Some(aggregates.indices.map(_ + groupings.length).toArray),
None,
groupings.length + aggregates.length + 2,
needRetract,
needMerge = true,
needReset = true,
None
)
new DataSetSessionWindowAggregatePreProcessor(
genFunction,
keysAndAggregatesArity,
asLong(gap),
combineReturnType)
case _ =>
throw new UnsupportedOperationException(
s" [ ${window.getClass.getCanonicalName.split("\\\\.").last} ] is currently not " +
s"supported on batch")
}
}
/**
* Create functions to compute a [[org.apache.flink.table.plan.nodes.dataset.DataSetAggregate]].
* If all aggregation functions support pre-aggregation, a pre-aggregation function and the
* respective output type are generated as well.
*/
private[flink] def createDataSetAggregateFunctions(
generator: AggregationCodeGenerator,
namedAggregates: Seq[CalcitePair[AggregateCall, String]],
inputType: RelDataType,
inputFieldTypeInfo: Seq[TypeInformation[_]],
outputType: RelDataType,
groupings: Array[Int],
inGroupingSet: Boolean): (Option[DataSetPreAggFunction],
Option[TypeInformation[Row]],
RichGroupReduceFunction[Row, Row]) = {
val needRetract = false
val (aggInFields, aggregates, accTypes, _) = transformToAggregateFunctions(
namedAggregates.map(_.getKey),
inputType,
needRetract)
val (gkeyOutMapping, aggOutMapping) = getOutputMappings(
namedAggregates,
groupings,
inputType,
outputType
)
val constantFlags: Option[Array[(Int, Boolean)]] =
if (inGroupingSet) {
val groupingSetsMapping = getGroupingSetsIndicatorMapping(inputType, outputType)
val nonNullKeysFields = gkeyOutMapping.map(_._1)
val flags = for ((in, out) <- groupingSetsMapping) yield
(out, !nonNullKeysFields.contains(in))
Some(flags)
} else {
None
}
val aggOutFields = aggOutMapping.map(_._1)
if (doAllSupportPartialMerge(aggregates)) {
// compute preaggregation type
val preAggFieldTypes = gkeyOutMapping.map(_._2)
.map(inputType.getFieldList.get(_).getType)
.map(FlinkTypeFactory.toTypeInfo) ++ accTypes
val preAggRowType = new RowTypeInfo(preAggFieldTypes: _*)
val genPreAggFunction = generator.generateAggregations(
"DataSetAggregatePrepareMapHelper",
inputFieldTypeInfo,
aggregates,
aggInFields,
aggregates.indices.map(_ + groupings.length).toArray,
partialResults = true,
groupings,
None,
None,
groupings.length + aggregates.length,
needRetract,
needMerge = false,
needReset = true,
None
)
// compute mapping of forwarded grouping keys
val gkeyMapping: Array[Int] = if (gkeyOutMapping.nonEmpty) {
val gkeyOutFields = gkeyOutMapping.map(_._1)
val mapping = Array.fill[Int](gkeyOutFields.max + 1)(-1)
gkeyOutFields.zipWithIndex.foreach(m => mapping(m._1) = m._2)
mapping
} else {
new Array[Int](0)
}
val genFinalAggFunction = generator.generateAggregations(
"DataSetAggregateFinalHelper",
inputFieldTypeInfo,
aggregates,
aggInFields,
aggOutFields,
partialResults = false,
gkeyMapping,
Some(aggregates.indices.map(_ + groupings.length).toArray),
constantFlags,
outputType.getFieldCount,
needRetract,
needMerge = true,
needReset = true,
None
)
(
Some(new DataSetPreAggFunction(genPreAggFunction)),
Some(preAggRowType),
new DataSetFinalAggFunction(genFinalAggFunction)
)
}
else {
val genFunction = generator.generateAggregations(
"DataSetAggregateHelper",
inputFieldTypeInfo,
aggregates,
aggInFields,
aggOutFields,
partialResults = false,
groupings,
None,
constantFlags,
outputType.getFieldCount,
needRetract,
needMerge = false,
needReset = true,
None
)
(
None,
None,
new DataSetAggFunction(genFunction)
)
}
}
/**
* Create an [[AllWindowFunction]] for non-partitioned window aggregates.
*/
private[flink] def createAggregationAllWindowFunction(
window: LogicalWindow,
finalRowArity: Int,
properties: Seq[NamedWindowProperty])
: AllWindowFunction[Row, CRow, DataStreamWindow] = {
if (isTimeWindow(window)) {
val (startPos, endPos, timePos) = computeWindowPropertyPos(properties)
new IncrementalAggregateAllTimeWindowFunction(
startPos,
endPos,
timePos,
finalRowArity)
.asInstanceOf[AllWindowFunction[Row, CRow, DataStreamWindow]]
} else {
new IncrementalAggregateAllWindowFunction(
finalRowArity)
}
}
/**
* Create a [[WindowFunction]] for group window aggregates.
*/
private[flink] def createAggregationGroupWindowFunction(
window: LogicalWindow,
numGroupingKeys: Int,
numAggregates: Int,
finalRowArity: Int,
properties: Seq[NamedWindowProperty]):
WindowFunction[Row, CRow, Tuple, DataStreamWindow] = {
if (isTimeWindow(window)) {
val (startPos, endPos, timePos) = computeWindowPropertyPos(properties)
new IncrementalAggregateTimeWindowFunction(
numGroupingKeys,
numAggregates,
startPos,
endPos,
timePos,
finalRowArity)
.asInstanceOf[WindowFunction[Row, CRow, Tuple, DataStreamWindow]]
} else {
new IncrementalAggregateWindowFunction(
numGroupingKeys,
numAggregates,
finalRowArity)
}
}
private[flink] def createDataStreamAggregateFunction(
generator: AggregationCodeGenerator,
namedAggregates: Seq[CalcitePair[AggregateCall, String]],
inputType: RelDataType,
inputFieldTypeInfo: Seq[TypeInformation[_]],
outputType: RelDataType,
groupingKeys: Array[Int],
needMerge: Boolean)
: (DataStreamAggFunction[CRow, Row, Row], RowTypeInfo, RowTypeInfo) = {
val needRetract = false
val (aggFields, aggregates, accTypes, _) =
transformToAggregateFunctions(
namedAggregates.map(_.getKey),
inputType,
needRetract)
val aggMapping = aggregates.indices.toArray
val outputArity = aggregates.length
val genFunction = generator.generateAggregations(
"GroupingWindowAggregateHelper",
inputFieldTypeInfo,
aggregates,
aggFields,
aggMapping,
partialResults = false,
groupingKeys,
None,
None,
outputArity,
needRetract,
needMerge,
needReset = false,
None
)
val aggResultTypes = namedAggregates.map(a => FlinkTypeFactory.toTypeInfo(a.left.getType))
val accumulatorRowType = new RowTypeInfo(accTypes: _*)
val aggResultRowType = new RowTypeInfo(aggResultTypes: _*)
val aggFunction = new AggregateAggFunction(genFunction)
(aggFunction, accumulatorRowType, aggResultRowType)
}
/**
* Return true if all aggregates can be partially merged. False otherwise.
*/
private[flink] def doAllSupportPartialMerge(
aggregateCalls: Seq[AggregateCall],
inputType: RelDataType,
groupKeysCount: Int): Boolean = {
val aggregateList = transformToAggregateFunctions(
aggregateCalls,
inputType,
needRetraction = false)._2
doAllSupportPartialMerge(aggregateList)
}
/**
* Return true if all aggregates can be partially merged. False otherwise.
*/
private[flink] def doAllSupportPartialMerge(
aggregateList: Array[TableAggregateFunction[_ <: Any, _ <: Any]]): Boolean = {
aggregateList.forall(ifMethodExistInFunction("merge", _))
}
/**
   * @return Mappings of field positions from input type to output type for grouping keys and
* aggregates.
*/
private def getOutputMappings(
namedAggregates: Seq[CalcitePair[AggregateCall, String]],
groupings: Array[Int],
inputType: RelDataType,
outputType: RelDataType) : (Array[(Int, Int)], Array[(Int, Int)]) = {
val groupKeyNames: Seq[(String, Int)] =
groupings.map(g => (inputType.getFieldList.get(g).getName, g))
val aggNames: Seq[(String, Int)] =
namedAggregates.zipWithIndex.map(a => (a._1.right, a._2))
val groupOutMapping: Array[(Int, Int)] =
groupKeyNames.map(g => (outputType.getField(g._1, false, false).getIndex, g._2)).toArray
val aggOutMapping: Array[(Int, Int)] =
aggNames.map(a => (outputType.getField(a._1, false, false).getIndex, a._2)).toArray
(groupOutMapping, aggOutMapping)
}
/**
* Determines the mapping of grouping keys to boolean indicators that describe the
* current grouping set.
*
* E.g.: Given we group on f1 and f2 of the input type, the output type contains two
* boolean indicator fields i$f1 and i$f2.
*/
private def getGroupingSetsIndicatorMapping(
inputType: RelDataType,
outputType: RelDataType): Array[(Int, Int)] = {
val inputFields = inputType.getFieldList.map(_.getName)
// map from field -> i$field or field -> i$field_0
val groupingFields = inputFields.map(inputFieldName => {
val base = "i$" + inputFieldName
var name = base
var i = 0
while (inputFields.contains(name)) {
name = base + "_" + i // if i$XXX is already a field it will be suffixed by _NUMBER
i = i + 1
}
inputFieldName -> name
}).toMap
val outputFields = outputType.getFieldList
    val mappingsBuffer = ArrayBuffer[(Int, Int)]()
for (i <- outputFields.indices) {
for (j <- outputFields.indices) {
val possibleKey = outputFields(i).getName
val possibleIndicator1 = outputFields(j).getName
// get indicator for output field
val possibleIndicator2 = groupingFields.getOrElse(possibleKey, null)
// check if indicator names match
if (possibleIndicator1 == possibleIndicator2) {
mappingsBuffer += ((i, j))
}
}
}
mappingsBuffer.toArray
}
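
  /* A short illustration (not in the original source) of the indicator-name
   * generation above: grouping field "f1" maps to indicator "i$f1"; if the
   * input already contains a field literally named "i$f1", the generated name
   * is suffixed ("i$f1_0", then "i$f1_1", ...) until it no longer collides
   * with an input field name.
   */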
private def isTimeWindow(window: LogicalWindow) = {
window match {
case TumblingGroupWindow(_, _, size) => isTimeIntervalLiteral(size)
case SlidingGroupWindow(_, _, size, _) => isTimeIntervalLiteral(size)
case SessionGroupWindow(_, _, _) => true
}
}
private[flink] def computeWindowPropertyPos(
properties: Seq[NamedWindowProperty]): (Option[Int], Option[Int], Option[Int]) = {
val propPos = properties.foldRight(
(None: Option[Int], None: Option[Int], None: Option[Int], 0)) {
case (p, (s, e, t, i)) => p match {
case NamedWindowProperty(_, prop) =>
prop match {
case WindowStart(_) if s.isDefined =>
throw new TableException("Duplicate WindowStart property encountered. This is a bug.")
case WindowStart(_) =>
(Some(i), e, t, i - 1)
case WindowEnd(_) if e.isDefined =>
throw new TableException("Duplicate WindowEnd property encountered. This is a bug.")
case WindowEnd(_) =>
(s, Some(i), t, i - 1)
case RowtimeAttribute(_) if t.isDefined =>
throw new TableException(
"Duplicate Window rowtime property encountered. This is a bug.")
case RowtimeAttribute(_) =>
(s, e, Some(i), i - 1)
}
}
}
(propPos._1, propPos._2, propPos._3)
}
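
  /* A mechanical illustration (not in the original source) of the foldRight
   * above: the counter starts at 0 on the right-most property and decreases,
   * so for properties = Seq(<start>, <end>) the result is
   * (startPos = Some(-1), endPos = Some(0), timePos = None), i.e. positions
   * counted backwards from the last named window property. Mapping these to
   * absolute field indexes is left to the window functions defined above.
   */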
private def transformToAggregateFunctions(
aggregateCalls: Seq[AggregateCall],
inputType: RelDataType,
needRetraction: Boolean,
isStateBackedDataViews: Boolean = false)
: (Array[Array[Int]],
Array[TableAggregateFunction[_, _]],
Array[TypeInformation[_]],
Array[Seq[DataViewSpec[_]]]) = {
    // store the aggregate fields of each aggregate function, in the same order as the aggregates.
val aggFieldIndexes = new Array[Array[Int]](aggregateCalls.size)
val aggregates = new Array[TableAggregateFunction[_ <: Any, _ <: Any]](aggregateCalls.size)
val accTypes = new Array[TypeInformation[_]](aggregateCalls.size)
// create aggregate function instances by function type and aggregate field data type.
aggregateCalls.zipWithIndex.foreach { case (aggregateCall, index) =>
val argList: util.List[Integer] = aggregateCall.getArgList
if (argList.isEmpty) {
if (aggregateCall.getAggregation.isInstanceOf[SqlCountAggFunction]) {
aggFieldIndexes(index) = Array[Int](0)
} else {
throw new TableException("Aggregate fields should not be empty.")
}
} else {
aggFieldIndexes(index) = argList.asScala.map(i => i.intValue).toArray
}
val sqlTypeName = inputType.getFieldList.get(aggFieldIndexes(index)(0)).getType
.getSqlTypeName
aggregateCall.getAggregation match {
case _: SqlSumAggFunction =>
if (needRetraction) {
aggregates(index) = sqlTypeName match {
case TINYINT =>
new ByteSumWithRetractAggFunction
case SMALLINT =>
new ShortSumWithRetractAggFunction
case INTEGER =>
new IntSumWithRetractAggFunction
case BIGINT =>
new LongSumWithRetractAggFunction
case FLOAT =>
new FloatSumWithRetractAggFunction
case DOUBLE =>
new DoubleSumWithRetractAggFunction
case DECIMAL =>
new DecimalSumWithRetractAggFunction
case sqlType: SqlTypeName =>
throw new TableException(s"Sum aggregate does no support type: '$sqlType'")
}
} else {
aggregates(index) = sqlTypeName match {
case TINYINT =>
new ByteSumAggFunction
case SMALLINT =>
new ShortSumAggFunction
case INTEGER =>
new IntSumAggFunction
case BIGINT =>
new LongSumAggFunction
case FLOAT =>
new FloatSumAggFunction
case DOUBLE =>
new DoubleSumAggFunction
case DECIMAL =>
new DecimalSumAggFunction
case sqlType: SqlTypeName =>
throw new TableException(s"Sum aggregate does no support type: '$sqlType'")
}
}
case _: SqlSumEmptyIsZeroAggFunction =>
if (needRetraction) {
aggregates(index) = sqlTypeName match {
case TINYINT =>
new ByteSum0WithRetractAggFunction
case SMALLINT =>
new ShortSum0WithRetractAggFunction
case INTEGER =>
new IntSum0WithRetractAggFunction
case BIGINT =>
new LongSum0WithRetractAggFunction
case FLOAT =>
new FloatSum0WithRetractAggFunction
case DOUBLE =>
new DoubleSum0WithRetractAggFunction
case DECIMAL =>
new DecimalSum0WithRetractAggFunction
case sqlType: SqlTypeName =>
throw new TableException(s"Sum0 aggregate does no support type: '$sqlType'")
}
} else {
aggregates(index) = sqlTypeName match {
case TINYINT =>
new ByteSum0AggFunction
case SMALLINT =>
new ShortSum0AggFunction
case INTEGER =>
new IntSum0AggFunction
case BIGINT =>
new LongSum0AggFunction
case FLOAT =>
new FloatSum0AggFunction
case DOUBLE =>
new DoubleSum0AggFunction
case DECIMAL =>
new DecimalSum0AggFunction
case sqlType: SqlTypeName =>
throw new TableException(s"Sum0 aggregate does no support type: '$sqlType'")
}
}
case _: SqlAvgAggFunction =>
aggregates(index) = sqlTypeName match {
case TINYINT =>
new ByteAvgAggFunction
case SMALLINT =>
new ShortAvgAggFunction
case INTEGER =>
new IntAvgAggFunction
case BIGINT =>
new LongAvgAggFunction
case FLOAT =>
new FloatAvgAggFunction
case DOUBLE =>
new DoubleAvgAggFunction
case DECIMAL =>
new DecimalAvgAggFunction
case sqlType: SqlTypeName =>
throw new TableException(s"Avg aggregate does no support type: '$sqlType'")
}
case sqlMinMaxFunction: SqlMinMaxAggFunction =>
aggregates(index) = if (sqlMinMaxFunction.getKind == SqlKind.MIN) {
if (needRetraction) {
sqlTypeName match {
case TINYINT =>
new ByteMinWithRetractAggFunction
case SMALLINT =>
new ShortMinWithRetractAggFunction
case INTEGER =>
new IntMinWithRetractAggFunction
case BIGINT =>
new LongMinWithRetractAggFunction
case FLOAT =>
new FloatMinWithRetractAggFunction
case DOUBLE =>
new DoubleMinWithRetractAggFunction
case DECIMAL =>
new DecimalMinWithRetractAggFunction
case BOOLEAN =>
new BooleanMinWithRetractAggFunction
case VARCHAR | CHAR =>
new StringMinWithRetractAggFunction
case sqlType: SqlTypeName =>
throw new TableException(
s"Min with retract aggregate does no support type: '$sqlType'")
}
} else {
sqlTypeName match {
case TINYINT =>
new ByteMinAggFunction
case SMALLINT =>
new ShortMinAggFunction
case INTEGER =>
new IntMinAggFunction
case BIGINT =>
new LongMinAggFunction
case FLOAT =>
new FloatMinAggFunction
case DOUBLE =>
new DoubleMinAggFunction
case DECIMAL =>
new DecimalMinAggFunction
case BOOLEAN =>
new BooleanMinAggFunction
case VARCHAR | CHAR =>
new StringMinAggFunction
case sqlType: SqlTypeName =>
throw new TableException(s"Min aggregate does no support type: '$sqlType'")
}
}
} else {
if (needRetraction) {
sqlTypeName match {
case TINYINT =>
new ByteMaxWithRetractAggFunction
case SMALLINT =>
new ShortMaxWithRetractAggFunction
case INTEGER =>
new IntMaxWithRetractAggFunction
case BIGINT =>
new LongMaxWithRetractAggFunction
case FLOAT =>
new FloatMaxWithRetractAggFunction
case DOUBLE =>
new DoubleMaxWithRetractAggFunction
case DECIMAL =>
new DecimalMaxWithRetractAggFunction
case BOOLEAN =>
new BooleanMaxWithRetractAggFunction
case VARCHAR | CHAR =>
new StringMaxWithRetractAggFunction
case sqlType: SqlTypeName =>
throw new TableException(
s"Max with retract aggregate does no support type: '$sqlType'")
}
} else {
sqlTypeName match {
case TINYINT =>
new ByteMaxAggFunction
case SMALLINT =>
new ShortMaxAggFunction
case INTEGER =>
new IntMaxAggFunction
case BIGINT =>
new LongMaxAggFunction
case FLOAT =>
new FloatMaxAggFunction
case DOUBLE =>
new DoubleMaxAggFunction
case DECIMAL =>
new DecimalMaxAggFunction
case BOOLEAN =>
new BooleanMaxAggFunction
case VARCHAR | CHAR =>
new StringMaxAggFunction
case sqlType: SqlTypeName =>
throw new TableException(s"Max aggregate does no support type: '$sqlType'")
}
}
}
case _: SqlCountAggFunction =>
aggregates(index) = new CountAggFunction
case udagg: AggSqlFunction =>
aggregates(index) = udagg.getFunction
accTypes(index) = udagg.accType
case unSupported: SqlAggFunction =>
throw new TableException(s"unsupported Function: '${unSupported.getName}'")
}
}
val accSpecs = new Array[Seq[DataViewSpec[_]]](aggregateCalls.size)
// create accumulator type information for every aggregate function
aggregates.zipWithIndex.foreach { case (agg, index) =>
if (accTypes(index) != null) {
val (accType, specs) = removeStateViewFieldsFromAccTypeInfo(index,
agg,
accTypes(index),
isStateBackedDataViews)
if (specs.isDefined) {
accSpecs(index) = specs.get
accTypes(index) = accType
} else {
accSpecs(index) = Seq()
}
} else {
accSpecs(index) = Seq()
accTypes(index) = getAccumulatorTypeOfAggregateFunction(agg)
}
}
(aggFieldIndexes, aggregates, accTypes, accSpecs)
}
private def createRowTypeForKeysAndAggregates(
groupings: Array[Int],
aggregates: Array[TableAggregateFunction[_, _]],
aggTypes: Array[TypeInformation[_]],
inputType: RelDataType,
windowKeyTypes: Option[Array[TypeInformation[_]]] = None): RowTypeInfo = {
// get the field data types of group keys.
val groupingTypes: Seq[TypeInformation[_]] =
groupings
.map(inputType.getFieldList.get(_).getType)
.map(FlinkTypeFactory.toTypeInfo)
// concat group key types, aggregation types, and window key types
val allFieldTypes: Seq[TypeInformation[_]] = windowKeyTypes match {
case None => groupingTypes ++: aggTypes
case _ => groupingTypes ++: aggTypes ++: windowKeyTypes.get
}
new RowTypeInfo(allFieldTypes: _*)
}
private def getTimeFieldPosition(
timeField: Expression,
inputType: RelDataType,
isParserCaseSensitive: Boolean): Int = {
timeField match {
case ResolvedFieldReference(name, _) =>
// get the RelDataType referenced by the time-field
val relDataType = inputType.getFieldList.filter { r =>
if (isParserCaseSensitive) {
name.equals(r.getName)
} else {
name.equalsIgnoreCase(r.getName)
}
}
// should only match one
if (relDataType.length == 1) {
relDataType.head.getIndex
} else {
        throw TableException(
          s"Expected exactly one time attribute with the given name, but found: '$relDataType'")
}
case e => throw TableException(
"The time attribute of window in batch environment should be " +
s"ResolvedFieldReference, but is $e")
}
}
private[flink] def asLong(expr: Expression): Long = expr match {
case Literal(value: Long, TimeIntervalTypeInfo.INTERVAL_MILLIS) => value
case Literal(value: Long, RowIntervalTypeInfo.INTERVAL_ROWS) => value
case _ => throw new IllegalArgumentException()
}
private[flink] def determineLargestTumblingSize(size: Long, slide: Long) = gcd(size, slide)
private def gcd(a: Long, b: Long): Long = {
if (b == 0) a else gcd(b, a % b)
}
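  // e.g. determineLargestTumblingSize(6000, 4000) == gcd(6000, 4000) == 2000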
}
| zohar-mizrahi/flink | flink-libraries/flink-table/src/main/scala/org/apache/flink/table/runtime/aggregate/AggregateUtil.scala | Scala | apache-2.0 | 54,644 |
package com.rocketfuel.sdbc.base.jdbc
import org.scalatest.FunSuite
import shapeless._
import TestDbms._
import shapeless.record.Record
/**
* Test cases taken from https://github.com/tpolecat/doobie/blob/c8a273c365edf5a583621fbfd77a49297986d82f/core/src/test/scala/doobie/util/composite.scala
*/
class CompositeGetterSpec
extends FunSuite {
case class Woozle(a: (String, Int), b: Int :: String :: HNil, c: Boolean)
test("CompositeGetter[Int]") {
assertCompiles("CompositeGetter[Int]")
}
test("CompositeGetter[(Int, Int)]") {
assertCompiles("CompositeGetter[(Int, Int)]")
}
test("CompositeGetter[(Int, Int, String)]") {
assertCompiles("CompositeGetter[(Int, Int, String)]")
}
test("CompositeGetter[(Int, (Int, String))]") {
assertCompiles("CompositeGetter[(Int, (Int, String))]")
}
test("CompositeGetter[Woozle]") {
assertCompiles("CompositeGetter[Woozle]")
}
test("CompositeGetter[(Woozle, String)]") {
assertCompiles("CompositeGetter[(Woozle, String)]")
}
test("CompositeGetter[(Int, Woozle :: Woozle :: String :: HNil)]") {
assertCompiles("CompositeGetter[(Int, Woozle :: Woozle :: String :: HNil)]")
}
test("shapeless record") {
type DL = (Int, String)
type A = Record.`'foo -> Int, 'bar -> String, 'baz -> DL, 'quz -> Woozle`.T
assertCompiles("CompositeGetter[A]")
assertCompiles("CompositeGetter[(A, A)]")
}
test("ConnectedRow#apply works") {
assertCompiles("val row: TestDbms.ConnectedRow = ???; val _ = row[String](???)")
}
test("ConnectedRow#apply works for optional value") {
assertCompiles("val row: TestDbms.ConnectedRow = ???; val _ = row[Option[String]](???)")
}
}
| rocketfuel/sdbc | jdbc/src/test/scala/com/rocketfuel/sdbc/base/jdbc/CompositeGetterSpec.scala | Scala | bsd-3-clause | 1,691 |
package at.logic.gapt.proofs.ceres
import at.logic.gapt.algorithms.rewriting.DefinitionElimination
import at.logic.gapt.expr._
import at.logic.gapt.formats.llk.HybridLatexParser
import at.logic.gapt.proofs.ceres.projections.Projections
import at.logic.gapt.proofs.ceres.struct.StructCreators
import at.logic.gapt.proofs.{ Ant, Suc, HOLSequent }
import at.logic.gapt.proofs.lk._
import at.logic.gapt.proofs.lk.base._
import at.logic.gapt.utils.testing.ClasspathFileCopier
import org.specs2.mutable._
import at.logic.gapt.proofs.lksk._
import at.logic.gapt.proofs.ral._
import at.logic.gapt.proofs.ceres.clauseSets._
/**
* Created by marty on 6/18/15.
*/
class ceres_omegaTest extends Specification with ClasspathFileCopier {
def prepareProof( file: String, proofname: String ) = {
val p = HybridLatexParser( tempCopyOfClasspathFile( file ) )
val elp = AtomicExpansion( DefinitionElimination( p.Definitions, regularize( p.proof( proofname ) ) ) )
val selp = LKToLKsk( elp )
val struct = StructCreators.extract( selp )
val ls = SimpleStandardClauseSet( struct )
val proj = Projections( selp )
( selp, ls, struct, proj )
}
def refutation1( cs: Set[HOLSequent] ) = {
val Some( c1 ) = cs.find( x => ( x.antecedent.size == 1 ) && ( x.succedent.size == 1 ) )
val Some( c2 ) = cs.find( x => ( x.antecedent.size == 1 ) && ( x.succedent.size == 0 ) )
val Some( c3 ) = cs.find( x => ( x.antecedent.size == 0 ) && ( x.succedent.size == 1 ) )
val y = Var( "y", Ti )
val x0 = Var( "x0", Ti )
val p = Const( "P", Ti -> ( Ti -> To ) )
val y0 = Var( "Y0", Ti -> To )
val s = c2.antecedent( 0 ) match { case HOLAtom( _, List( s, _ ) ) => s }
val sub1 = Substitution( y0, Abs( y, HOLAtom( p, List( s, y ) ) ) )
val sub2 = Substitution( x0, s )
val r1 = RalInitial( c1 map { Set[LambdaExpression]() -> _ } )
val r2 = RalInitial( c2 map { Set[LambdaExpression]() -> _ } )
val r3 = RalInitial( c3 map { Set[LambdaExpression]() -> _ } )
val r4 = RalSub( r1, sub1 )
val r3a = RalSub( r3, sub2 )
val r5 = RalCut( r3a, Seq( Suc( 0 ) ), r4, Seq( Ant( 0 ) ) )
val r6 = RalCut( r5, Seq( Suc( 0 ) ), r2, Seq( Ant( 0 ) ) )
r6
}
def refutation2( cs: Set[HOLSequent] ) = {
val Some( c1 ) = cs.find( x => ( x.antecedent.size == 1 ) && ( x.succedent.size == 1 ) )
val Some( c2 ) = cs.find( x => ( x.antecedent.size == 1 ) && ( x.succedent.size == 0 ) )
val Some( c3 ) = cs.find( x => ( x.antecedent.size == 0 ) && ( x.succedent.size == 1 ) )
val r1 = RalInitial( c1 map { Set[LambdaExpression]() -> _ } )
val r2 = RalInitial( c2 map { Set[LambdaExpression]() -> _ } )
val r3 = RalInitial( c3 map { Set[LambdaExpression]() -> _ } )
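    // NOTE: r1, r2 and r3 are constructed but never combined; this refutation is left
    // unfinished and is not exercised by the test below.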
}
"Ceres_omega" should {
"handle a proof with a manual refutation (1)" in {
val ( p, cs, struct, proj ) = prepareProof( "llk/simple-leibnizeq.llk", "THEPROOF" )
val rp = refutation1( cs )
val ( acnf, _ ) = ceres_omega( proj, rp, sequentToLabelledSequent( p.root ), struct )
val et = LKskToExpansionProof( acnf )
ok
}
}
}
| loewenheim/gapt | src/test/scala/at/logic/gapt/proofs/ceres/ceres_omegaTest.scala | Scala | gpl-3.0 | 3,112 |
object Absolute1 {
def absolute(f: Int => Int): Int => Int =
n => f(n).abs // This uses the built-in `abs` method on `Int`
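  // e.g. absolute(_ - 5)(3) == 2 and absolute(identity[Int])(-4) == 4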
} | ryo-murai/fpinscala-exercises | answerkey/structuringprograms/2.answer.scala | Scala | mit | 130 |
package com.twitter.finagle.mux
import com.twitter.finagle.{Context, Dtab, Service, WriteException, NoStacktrace}
import com.twitter.finagle.netty3.BufChannelBuffer
import com.twitter.finagle.mux.lease.Acting
import com.twitter.finagle.stats.StatsReceiver
import com.twitter.finagle.tracing.{Trace, Annotation}
import com.twitter.finagle.transport.Transport
import com.twitter.util.{Future, Promise, Time, Duration}
import com.twitter.conversions.time._
import java.util.logging.Logger
import org.jboss.netty.buffer.{ChannelBuffer, ChannelBuffers}
/**
* Indicates that a client request was denied by the server.
*/
object RequestNackedException
extends Exception("The request was nackd by the server")
with WriteException with NoStacktrace
/**
* Indicates that the server failed to interpret or act on the request. This
* could mean that the client sent a [[com.twitter.finagle.mux]] message type
* that the server is unable to process.
*/
case class ServerError(what: String)
extends Exception(what)
with NoStacktrace
/**
* Indicates that the server encountered an error whilst processing the client's
* request. In contrast to [[com.twitter.finagle.mux.ServerError]], a
* ServerApplicationError relates to server application failure rather than
* failure to interpret the request.
*/
case class ServerApplicationError(what: String)
extends Exception(what)
with NoStacktrace
private case class Lease(end: Time) {
def remaining: Duration = end.sinceNow
def expired: Boolean = end < Time.now
}
private object Lease {
import Message.Tlease
def parse(unit: Byte, howMuch: Long): Option[Lease] = unit match {
case Tlease.MillisDuration => Some(new Lease(howMuch.milliseconds.fromNow))
case _ => None
}
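  // Illustrative: parse(Tlease.MillisDuration, 1000L) yields a lease expiring roughly
  // one second from now; any other unit yields None.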
}
private object Cap extends Enumeration {
type State = Value
val Unknown, Yes, No = Value
}
object ClientDispatcher {
val ClientEnabledTraceMessage = "finagle.mux.clientEnabled"
}
/**
* A ClientDispatcher for the mux protocol.
*/
private[finagle] class ClientDispatcher (
trans: Transport[ChannelBuffer, ChannelBuffer],
sr: StatsReceiver
) extends Service[ChannelBuffer, ChannelBuffer] with Acting {
import Message._
@volatile private[this] var canDispatch: Cap.State = Cap.Unknown
@volatile private[this] var drained = false
private[this] val futureNackedException = Future.exception(RequestNackedException)
private[this] val tags = TagSet()
private[this] val reqs = TagMap[Promise[ChannelBuffer]](tags)
private[this] val log = Logger.getLogger(getClass.getName)
@volatile private[this] var lease = new Lease(Time.Top)
private[this] val gauge = sr.addGauge("current_lease_ms") {
lease.remaining.inMilliseconds
}
private[this] val leaseCounter = sr.counter("lease_counter")
private[this] val receive: Message => Unit = {
case RreqOk(tag, rep) =>
for (p <- reqs.unmap(tag))
p.setValue(rep)
case RreqError(tag, error) =>
for (p <- reqs.unmap(tag))
p.setException(ServerApplicationError(error))
case RreqNack(tag) =>
for (p <- reqs.unmap(tag))
p.setException(RequestNackedException)
case RdispatchOk(tag, _, rep) =>
for (p <- reqs.unmap(tag))
p.setValue(rep)
case RdispatchError(tag, _, error) =>
for (p <- reqs.unmap(tag))
p.setException(ServerApplicationError(error))
case RdispatchNack(tag, _) =>
for (p <- reqs.unmap(tag))
p.setException(RequestNackedException)
case Rerr(tag, error) =>
for (p <- reqs.unmap(tag))
p.setException(ServerError(error))
case Rping(tag) =>
for (p <- reqs.unmap(tag))
p.setValue(ChannelBuffers.EMPTY_BUFFER)
case Tping(tag) =>
trans.write(encode(Rping(tag)))
case Tdrain(tag) =>
drained = true
trans.write(encode(Rdrain(tag)))
case Tlease(unit, howMuch) =>
Lease.parse(unit, howMuch) foreach { newLease =>
log.fine("leased for " + newLease + " to " + trans.remoteAddress)
leaseCounter.incr()
lease = newLease
}
case m@Tmessage(tag) =>
log.warning("Did not understand Tmessage[tag=%d] %s".format(tag, m))
trans.write(encode(Rerr(tag, "badmessage")))
case m@Rmessage(tag) =>
val what = "Did not understand Rmessage[tag=%d] %s".format(tag, m)
log.warning(what)
for (p <- reqs.unmap(tag))
p.setException(BadMessageException(what))
}
private[this] val readAndAct: ChannelBuffer => Future[Nothing] =
buf => try {
val m = decode(buf)
receive(m)
loop()
} catch {
case exc: BadMessageException =>
Future.exception(exc)
}
private[this] def loop(): Future[Nothing] =
trans.read() flatMap readAndAct
loop() onFailure { case exc =>
trans.close()
for ((tag, p) <- reqs)
p.setException(exc)
}
def ping(): Future[Unit] = {
val p = new Promise[ChannelBuffer]
reqs.map(p) match {
case None =>
Future.exception(WriteException(new Exception("Exhausted tags")))
case Some(tag) =>
trans.write(encode(Tping(tag))).onFailure { case exc =>
reqs.unmap(tag)
}.flatMap(Function.const(p)).unit
}
}
def apply(req: ChannelBuffer): Future[ChannelBuffer] =
if (drained)
futureNackedException
else
dispatch(req, true)
/**
* Dispatch a request.
*
* @param req the buffer representation of the request to be dispatched
   * @param traceWrite if true, tracing info will be recorded for the request;
   *                   otherwise, no tracing will be performed.
*/
private def dispatch(
req: ChannelBuffer,
traceWrite: Boolean
): Future[ChannelBuffer] = {
val p = new Promise[ChannelBuffer]
val couldDispatch = canDispatch
val tag = reqs.map(p) getOrElse {
return Future.exception(WriteException(new Exception("Exhausted tags")))
}
val msg =
if (couldDispatch == Cap.No)
Treq(tag, Some(Trace.id), req)
else {
val contexts = Context.emit() map { case (k, v) =>
(BufChannelBuffer(k), BufChannelBuffer(v))
}
Tdispatch(tag, contexts.toSeq, "", Dtab.local, req)
}
if (traceWrite) {
// Record tracing info to track Mux adoption across clusters.
Trace.record(ClientDispatcher.ClientEnabledTraceMessage)
}
trans.write(encode(msg)) onFailure { case exc =>
reqs.unmap(tag)
} before {
p.setInterruptHandler { case cause =>
for (reqP <- reqs.maybeRemap(tag, new Promise[ChannelBuffer])) {
trans.write(encode(Tdiscarded(tag, cause.toString)))
reqP.setException(cause)
}
}
p
}
if (couldDispatch == Cap.Unknown) {
p onSuccess { _ =>
canDispatch = Cap.Yes
} rescue {
case ServerError(_) =>
// We've determined that the server cannot handle Tdispatch messages,
// so we fall back to a Treq and disable tracing in order to not
// double-count the request.
canDispatch = Cap.No
dispatch(req, false)
}
} else p
}
override def isAvailable = !drained && trans.isOpen
def isActive = !lease.expired
override def close(deadline: Time) = trans.close(deadline)
}
| yancl/finagle-6.22.0 | finagle-mux/src/main/scala/com/twitter/finagle/mux/Client.scala | Scala | apache-2.0 | 7,236 |
object Minimized {
trait Literal {
type F[T]
}
trait Addition { self: Literal =>
def foo: F[Int]
}
object Main {
def expression(adder: Addition & Literal) = { // error: adder.F is not defined in inferred type
adder.foo
}
}
}
| som-snytt/dotty | tests/pos/i3083.scala | Scala | apache-2.0 | 261 |
package com.twitter.finagle.memcached.integration
import com.twitter.common.application.ShutdownRegistry.ShutdownRegistryImpl
import com.twitter.common.zookeeper.ServerSet.EndpointStatus
import com.twitter.common.zookeeper.testing.ZooKeeperTestServer
import com.twitter.common.zookeeper.{CompoundServerSet, ZooKeeperUtils, ServerSets, ZooKeeperClient}
import com.twitter.conversions.time._
import com.twitter.finagle.Group
import com.twitter.finagle.cacheresolver.{ZookeeperCacheNodeGroup, CacheNode, CachePoolConfig}
import com.twitter.util.{Duration, Stopwatch, TimeoutException}
import java.io.ByteArrayOutputStream
import java.net.InetSocketAddress
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import org.scalatest.{BeforeAndAfterEach, FunSuite}
import scala.collection.JavaConversions._
@RunWith(classOf[JUnitRunner])
class CacheNodeGroupTest extends FunSuite with BeforeAndAfterEach {
/**
* Note: This integration test requires a real Memcached server to run.
*/
var shutdownRegistry: ShutdownRegistryImpl = null
var testServers: List[(TestMemcachedServer, EndpointStatus)] = List()
var serverSet: CompoundServerSet = null
var zookeeperClient: ZooKeeperClient = null
val zkPath = "/cache/test/silly-cache"
var zookeeperServer: ZooKeeperTestServer = null
override def beforeEach() {
// start zookeeper server and create zookeeper client
shutdownRegistry = new ShutdownRegistryImpl
zookeeperServer = new ZooKeeperTestServer(0, shutdownRegistry)
zookeeperServer.startNetwork()
// connect to zookeeper server
zookeeperClient = zookeeperServer.createClient(ZooKeeperClient.digestCredentials("user","pass"))
// create serverset
serverSet = new CompoundServerSet(List(
ServerSets.create(zookeeperClient, ZooKeeperUtils.EVERYONE_READ_CREATOR_ALL, zkPath)))
// start five memcached server and join the cluster
addShards(List(0, 1, 2, 3, 4))
// set cache pool config node data
val cachePoolConfig: CachePoolConfig = new CachePoolConfig(cachePoolSize = 5)
val output: ByteArrayOutputStream = new ByteArrayOutputStream
CachePoolConfig.jsonCodec.serialize(cachePoolConfig, output)
zookeeperClient.get().setData(zkPath, output.toByteArray, -1)
// a separate client which only does zk discovery for integration test
zookeeperClient = zookeeperServer.createClient(ZooKeeperClient.digestCredentials("user","pass"))
}
override def afterEach() {
// shutdown zookeeper server and client
shutdownRegistry.execute()
// shutdown memcached server
testServers foreach { case (s, _) => s.stop() }
testServers = List()
}
if (!sys.props.contains("SKIP_FLAKY")) // CSL-1735
test("doesn't blow up") {
val myPool = new ZookeeperCacheNodeGroup(zkPath, zookeeperClient)
assert(waitForMemberSize(myPool, 0, 5))
assert(myPool.members forall(_.key.isDefined))
}
if (!Option(System.getProperty("SKIP_FLAKY")).isDefined) test("add and remove") {
// the cluster initially must have 5 members
val myPool = new ZookeeperCacheNodeGroup(zkPath, zookeeperClient)
assert(waitForMemberSize(myPool, 0, 5))
var currentMembers = myPool.members
/***** start 5 more memcached servers and join the cluster ******/
// cache pool should remain the same size at this moment
addShards(List(5, 6, 7, 8, 9))
assert(waitForMemberSize(myPool, 5, 5))
assert(myPool.members == currentMembers)
// update config data node, which triggers the pool update
// cache pool cluster should be updated
updateCachePoolConfigData(10)
assert(waitForMemberSize(myPool, 5, 10))
assert(myPool.members != currentMembers)
currentMembers = myPool.members
/***** remove 2 servers from the zk serverset ******/
// cache pool should remain the same size at this moment
testServers(0)._2.leave()
testServers(1)._2.leave()
assert(waitForMemberSize(myPool, 10, 10))
assert(myPool.members == currentMembers)
// update config data node, which triggers the pool update
// cache pool should be updated
updateCachePoolConfigData(8)
assert(waitForMemberSize(myPool, 10, 8))
assert(myPool.members != currentMembers)
currentMembers = myPool.members
/***** remove 2 more then add 3 ******/
// cache pool should remain the same size at this moment
testServers(2)._2.leave()
testServers(3)._2.leave()
addShards(List(10, 11, 12))
assert(waitForMemberSize(myPool, 8, 8))
assert(myPool.members == currentMembers)
// update config data node, which triggers the pool update
// cache pool should be updated
updateCachePoolConfigData(9)
assert(waitForMemberSize(myPool, 8, 9))
assert(myPool.members != currentMembers)
currentMembers = myPool.members
}
if (!Option(System.getProperty("SKIP_FLAKY")).isDefined) test("node key remap") {
// turn on detecting key remapping
val output: ByteArrayOutputStream = new ByteArrayOutputStream
CachePoolConfig.jsonCodec.serialize(CachePoolConfig(5, detectKeyRemapping = true), output)
zookeeperClient.get().setData(zkPath, output.toByteArray, -1)
// the cluster initially must have 5 members
val myPool = new ZookeeperCacheNodeGroup(zkPath, zookeeperClient)
assert(waitForMemberSize(myPool, 0, 5))
var currentMembers = myPool.members
/***** only remap shard key should immediately take effect ******/
testServers(2)._2.leave()
testServers(3)._2.leave()
addShards(List(2, 3))
assert(waitForMemberSize(myPool, 5, 5))
    assert(myPool.members != currentMembers, myPool.members + " should not equal " + currentMembers)
currentMembers = myPool.members
// turn off detecting key remapping
CachePoolConfig.jsonCodec.serialize(CachePoolConfig(5, detectKeyRemapping = false), output)
zookeeperClient.get().setData(zkPath, output.toByteArray, -1)
assert(waitForMemberSize(myPool, 5, 5))
    assert(myPool.members == currentMembers, myPool.members + " should equal " + currentMembers)
testServers(4)._2.leave()
addShards(List(4))
assert(waitForMemberSize(myPool, 5, 5))
    assert(myPool.members == currentMembers, myPool.members + " should equal " + currentMembers)
/***** remap shard key while adding keys should not take effect ******/
CachePoolConfig.jsonCodec.serialize(CachePoolConfig(5, detectKeyRemapping = true), output)
zookeeperClient.get().setData(zkPath, output.toByteArray, -1)
assert(waitForMemberSize(myPool, 5, 5))
testServers(0)._2.leave()
testServers(1)._2.leave()
addShards(List(5, 0, 1))
assert(waitForMemberSize(myPool, 5, 5))
    assert(myPool.members == currentMembers, myPool.members + " should equal " + currentMembers)
}
if (!Option(System.getProperty("SKIP_FLAKY")).isDefined) test("zk failures test") {
// the cluster initially must have 5 members
val myPool = new ZookeeperCacheNodeGroup(zkPath, zookeeperClient)
assert(waitForMemberSize(myPool, 0, 5))
var currentMembers = myPool.members
/***** fail the server here to verify the pool manager will re-establish ******/
// cache pool cluster should remain the same
zookeeperServer.expireClientSession(zookeeperClient)
zookeeperServer.shutdownNetwork()
assert(waitForMemberSize(myPool, 5, 5))
assert(myPool.members == currentMembers)
/***** start the server now ******/
// cache pool cluster should remain the same
zookeeperServer.startNetwork
assert(waitForMemberSize(myPool, 5, 5, 15.seconds))
assert(myPool.members == currentMembers)
/***** start 5 more memcached servers and join the cluster ******/
// update config data node, which triggers the pool update
    // cache pool cluster should still be able to see underlying pool changes
addShards(List(5, 6, 7, 8, 9))
updateCachePoolConfigData(10)
assert(waitForMemberSize(myPool, 5, 10, 5.seconds))
assert(myPool.members != currentMembers)
currentMembers = myPool.members
}
private def waitForMemberSize(pool: Group[CacheNode], current: Int, expect: Int, timeout: Duration = 15.seconds): Boolean = {
val elapsed = Stopwatch.start()
def loop(): Boolean = {
if (current != expect && pool.members.size == expect)
true // expect pool size changes
else if (current == expect && pool.members.size != expect)
false // expect pool size remains
else if (timeout < elapsed()) {
if (current != expect) throw new TimeoutException("timed out waiting for CacheNode pool to reach the expected size")
else true
}
else {
Thread.sleep(100)
loop()
}
}
loop()
}
private def updateCachePoolConfigData(size: Int) {
val cachePoolConfig: CachePoolConfig = new CachePoolConfig(cachePoolSize = size)
val output: ByteArrayOutputStream = new ByteArrayOutputStream
CachePoolConfig.jsonCodec.serialize(cachePoolConfig, output)
zookeeperClient.get().setData(zkPath, output.toByteArray, -1)
}
  // start additional memcached servers and register them with the serverset under the given shard ids
private def addShards(shardIds: List[Int]): Unit = {
shardIds.foreach { shardId =>
TestMemcachedServer.start() match {
case Some(server) =>
testServers :+= (server, serverSet.join(server.address, Map[String, InetSocketAddress](), shardId))
case None => fail("Cannot start memcached. Skipping...")
}
}
}
}
| cogitate/twitter-finagle-uuid | finagle-memcached/src/test/scala/com/twitter/finagle/memcached/integration/CacheNodeGroupTest.scala | Scala | apache-2.0 | 9,475 |
package mesosphere.marathon
package api.v2
import java.util
import javax.inject.Inject
import javax.servlet.http.HttpServletRequest
import javax.ws.rs._
import javax.ws.rs.core.{ Context, MediaType, Response }
import mesosphere.marathon.api.v2.json.Formats._
import mesosphere.marathon.api.{ EndpointsHelper, MarathonMediaType, TaskKiller, _ }
import mesosphere.marathon.core.appinfo.EnrichedTask
import mesosphere.marathon.core.async.ExecutionContexts
import mesosphere.marathon.core.condition.Condition
import mesosphere.marathon.core.group.GroupManager
import mesosphere.marathon.core.health.{ Health, HealthCheckManager }
import mesosphere.marathon.core.instance.Instance
import mesosphere.marathon.core.instance.Instance.Id
import mesosphere.marathon.core.task.Task
import mesosphere.marathon.core.task.tracker.InstanceTracker
import mesosphere.marathon.plugin.auth.{ Authenticator, Authorizer, UpdateRunSpec, ViewRunSpec }
import mesosphere.marathon.state.{ AppDefinition, PathId }
import mesosphere.marathon.stream.Implicits._
import org.slf4j.LoggerFactory
import play.api.libs.json.Json
import scala.async.Async._
import scala.concurrent.Future
@Path("v2/tasks")
class TasksResource @Inject() (
instanceTracker: InstanceTracker,
taskKiller: TaskKiller,
val config: MarathonConf,
groupManager: GroupManager,
healthCheckManager: HealthCheckManager,
val authenticator: Authenticator,
val authorizer: Authorizer) extends AuthResource {
val log = LoggerFactory.getLogger(getClass.getName)
implicit val ec = ExecutionContexts.global
@GET
@Produces(Array(MarathonMediaType.PREFERRED_APPLICATION_JSON))
@SuppressWarnings(Array("all")) /* async/await */
def indexJson(
@QueryParam("status") status: String,
@QueryParam("status[]") statuses: util.List[String],
@Context req: HttpServletRequest): Response = authenticated(req) { implicit identity =>
Option(status).map(statuses.add)
val conditionSet: Set[Condition] = statuses.flatMap(toTaskState)(collection.breakOut)
val futureEnrichedTasks = async {
val instancesBySpec = await(instanceTracker.instancesBySpec)
val instances = instancesBySpec.instancesMap.values.view.flatMap { appTasks =>
appTasks.instances.map(i => appTasks.specId -> i)
}
val appIds = instancesBySpec.allSpecIdsWithInstances
//TODO: Move to GroupManager.
val appIdsToApps: Map[PathId, Option[AppDefinition]] =
appIds.map(appId => appId -> groupManager.app(appId))(collection.breakOut)
val appToPorts = appIdsToApps.map {
case (appId, app) => appId -> app.map(_.servicePorts).getOrElse(Nil)
}
val health = await(
Future.sequence(appIds.map { appId =>
healthCheckManager.statuses(appId)
})).foldLeft(Map[Id, Seq[Health]]())(_ ++ _)
instances.flatMap {
case (appId, instance) =>
val app = appIdsToApps(appId)
if (isAuthorized(ViewRunSpec, app) && (conditionSet.isEmpty || conditionSet(instance.state.condition))) {
instance.tasksMap.values.map { task =>
EnrichedTask(
appId,
task,
instance.agentInfo,
health.getOrElse(instance.instanceId, Nil),
appToPorts.getOrElse(appId, Nil)
)
}
} else {
None
}
}.force
}
val enrichedTasks: Iterable[EnrichedTask] = result(futureEnrichedTasks)
ok(jsonObjString(
"tasks" -> enrichedTasks
))
}
@GET
@Produces(Array(MediaType.TEXT_PLAIN))
@SuppressWarnings(Array("all")) /* async/await */
def indexTxt(@Context req: HttpServletRequest): Response = authenticated(req) { implicit identity =>
result(async {
val instancesBySpec = await(instanceTracker.instancesBySpec)
val rootGroup = groupManager.rootGroup()
val appsToEndpointString = EndpointsHelper.appsToEndpointString(
instancesBySpec,
rootGroup.transitiveApps.filterAs(app => isAuthorized(ViewRunSpec, app))(collection.breakOut)
)
ok(appsToEndpointString)
})
}
@POST
@Produces(Array(MarathonMediaType.PREFERRED_APPLICATION_JSON))
@Consumes(Array(MediaType.APPLICATION_JSON))
@Path("delete")
@SuppressWarnings(Array("all")) /* async/await */
def killTasks(
@QueryParam("scale")@DefaultValue("false") scale: Boolean,
@QueryParam("force")@DefaultValue("false") force: Boolean,
@QueryParam("wipe")@DefaultValue("false") wipe: Boolean,
body: Array[Byte],
@Context req: HttpServletRequest): Response = authenticated(req) { implicit identity =>
if (scale && wipe) throw new BadRequestException("You cannot use scale and wipe at the same time.")
val taskIds = (Json.parse(body) \ "ids").as[Set[String]]
val tasksIdToAppId: Map[Instance.Id, PathId] = taskIds.map { id =>
try { Task.Id(id).instanceId -> Task.Id.runSpecId(id) }
catch { case e: MatchError => throw new BadRequestException(s"Invalid task id '$id'. [${e.getMessage}]") }
}(collection.breakOut)
def scaleAppWithKill(toKill: Map[PathId, Seq[Instance]]): Future[Response] = async {
val killAndScale = await(taskKiller.killAndScale(toKill, force))
deploymentResult(killAndScale)
}
def doKillTasks(toKill: Map[PathId, Seq[Instance]]): Future[Response] = async {
val affectedApps = tasksIdToAppId.values.flatMap(appId => groupManager.app(appId))(collection.breakOut)
// FIXME (gkleiman): taskKiller.kill a few lines below also checks authorization, but we need to check ALL before
// starting to kill tasks
affectedApps.foreach(checkAuthorization(UpdateRunSpec, _))
val killed = await(Future.sequence(toKill
.filter { case (appId, _) => affectedApps.exists(app => app.id == appId) }
.map {
case (appId, instances) => taskKiller.kill(appId, _ => instances, wipe)
})).flatten
ok(jsonObjString("tasks" -> killed.flatMap { instance =>
instance.tasksMap.valuesIterator.map { task =>
EnrichedTask(task.runSpecId, task, instance.agentInfo, Seq.empty)
}
}))
}
val futureResponse = async {
val maybeInstances: Iterable[Option[Instance]] = await(Future.sequence(tasksIdToAppId.view
.map { case (taskId, _) => instanceTracker.instancesBySpec.map(_.instance(taskId)) }))
val tasksByAppId: Map[PathId, Seq[Instance]] = maybeInstances.flatten
.groupBy(instance => instance.instanceId.runSpecId)
.map { case (appId, instances) => appId -> instances.to[Seq] }(collection.breakOut)
val response =
if (scale) scaleAppWithKill(tasksByAppId)
else doKillTasks(tasksByAppId)
await(response)
}
result(futureResponse)
}
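  // Note: only "running" and "staging" are recognized below; any other status string
  // maps to None and is silently dropped from the condition filter used by indexJson.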
private def toTaskState(state: String): Option[Condition] = state.toLowerCase match {
case "running" => Some(Condition.Running)
case "staging" => Some(Condition.Staging)
case _ => None
}
}
| natemurthy/marathon | src/main/scala/mesosphere/marathon/api/v2/TasksResource.scala | Scala | apache-2.0 | 7,003 |
package com.thoughtworks.deeplearning
import scalaz.{ContT, Trampoline}
import scalaz.Free.Trampoline
import scalaz.concurrent.Future
/**
* @author 杨博 (Yang Bo) <[email protected]>
*/
object FutureIsomorphism extends scalaz.Isomorphism.IsoFunctorTemplate[Future, ContT[Trampoline, Unit, ?]] {
override def to[A](fa: Future[A]): ContT[Trampoline, Unit, A] = ContT[Trampoline, Unit, A] { continue =>
Trampoline.delay(fa.unsafePerformListen(continue))
}
override def from[A](ga: ContT[Trampoline, Unit, A]): Future[A] = {
Future.Async { continue =>
ga(continue).run
}
}
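  // Isomorphism sanity check (illustrative): `from(to(fa))` should behave the same as
  // `fa`, and `to(from(ga))` the same as `ga`.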
}
| Atry/DeepLearning.scala | FutureIsomorphism/src/main/scala/com/thoughtworks/deeplearning/FutureIsomorphism.scala | Scala | apache-2.0 | 613 |
/*
* Wire
* Copyright (C) 2016 Wire Swiss GmbH
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package com.waz.znet2
import java.io.{ByteArrayInputStream, File}
import com.waz.specs.ZSpec
import com.waz.utils.{JsonDecoder, JsonEncoder}
import com.waz.znet2.http.HttpClient.{CustomErrorConstructor, Progress, ProgressCallback}
import com.waz.znet2.http.Request.UrlCreator
import com.waz.znet2.http._
import okhttp3.mockwebserver.{MockResponse, MockWebServer}
import okio.{Buffer, Okio}
import org.json.JSONObject
import com.waz.znet2.http.HttpClient.AutoDerivationOld._
import scala.collection.mutable.ArrayBuffer
import scala.concurrent.ExecutionContext.Implicits.global
class HttpClientSpec extends ZSpec {
case class FooError(description: String)
implicit val fooErrorConstructor: CustomErrorConstructor[FooError] = new CustomErrorConstructor[FooError] {
override def constructFrom(error: HttpClient.HttpClientError): FooError = FooError("unknown error")
}
implicit val fooErrorEncoder: JsonEncoder[FooError] = new JsonEncoder[FooError] {
override def apply(data: FooError): JSONObject = JsonEncoder { o =>
o.put("description", data.description)
}
}
implicit val fooErrorDecoder: JsonDecoder[FooError] = new JsonDecoder[FooError] {
import JsonDecoder._
override def apply(implicit js: JSONObject): FooError = {
FooError(decodeString('description))
}
}
val tempFileBodyDeserializer: RawBodyDeserializer[File] = RawBodyDeserializer.createFileRawBodyDeserializer(
new File(s"${System.getProperty("java.io.tmpdir")}/http_client_tmp_${System.currentTimeMillis()}")
)
case class Foo(a: Int, b: String)
implicit val fooEncoder: JsonEncoder[Foo] = new JsonEncoder[Foo] {
override def apply(data: Foo): JSONObject = JsonEncoder { o =>
o.put("a", data.a)
o.put("b", data.b)
}
}
implicit val fooDecoder: JsonDecoder[Foo] = new JsonDecoder[Foo] {
import JsonDecoder._
override def apply(implicit js: JSONObject): Foo = {
Foo(decodeInt('a), 'b)
}
}
private def createClient(): HttpClientOkHttpImpl = {
new HttpClientOkHttpImpl(HttpClientOkHttpImpl.createOkHttpClient())
}
private var mockServer: MockWebServer = _
private implicit val urlCreator: UrlCreator = UrlCreator.create(relativeUrl => mockServer.url(relativeUrl).url())
override protected def beforeEach(): Unit = {
mockServer = new MockWebServer()
mockServer.start()
}
override protected def afterEach(): Unit = {
mockServer.shutdown()
}
feature("Http client") {
scenario("return http response when server is responding.") {
val testResponseCode = 201
val testBodyStr = "test body"
mockServer.enqueue(
new MockResponse()
.setResponseCode(testResponseCode)
.setHeader("Content-Type", "text/plain")
.setBody(testBodyStr)
)
val client = createClient()
val request = Request.Get("/test")
var response: Response[Array[Byte]] = null
noException shouldBe thrownBy { response = result { client.result[EmptyBody, Response[Array[Byte]]](request) } }
response.code shouldBe testResponseCode
new String(response.body) shouldBe testBodyStr
}
}
scenario("return decoded response body [Foo] when server is responding.") {
val testResponseCode = 201
val testResponseObject = Foo(1, "ok")
val testResponseBodyStr = fooEncoder(testResponseObject).toString
mockServer.enqueue(
new MockResponse()
.setResponseCode(testResponseCode)
.setHeader("Content-Type", "application/json")
.setBody(testResponseBodyStr)
)
val client = createClient()
val request = Request.Get("/test")
val responseObjectFuture = client.result[EmptyBody, Foo](request)
var responseObject: Foo = null
noException shouldBe thrownBy {
responseObject = result { responseObjectFuture }
}
responseObject shouldBe testResponseObject
}
scenario("return decoded response body [File] when server is responding.") {
val testResponseCode = 201
val testResponseObject = Foo(1, "ok")
val testResponseBodyStr = fooEncoder(testResponseObject).toString
mockServer.enqueue(
new MockResponse()
.setResponseCode(testResponseCode)
.setHeader("Content-Type", "application/json")
.setBody(testResponseBodyStr)
)
val client = createClient()
val request = Request.Get("/test")
implicit val deserializer: RawBodyDeserializer[File] = tempFileBodyDeserializer
val responseObjectFuture = client.result[EmptyBody, File](request)
var responseFile: File = null
noException shouldBe thrownBy {
responseFile = result { responseObjectFuture }
}
responseFile.exists() shouldBe true
scala.io.Source.fromFile(responseFile).mkString shouldBe testResponseBodyStr
}
scenario("return decoded response body [Right[_, Foo]] when response code is successful.") {
val testResponseCode = 201
val testResponseObject = Foo(1, "ok")
val testResponseBodyStr = fooEncoder(testResponseObject).toString
mockServer.enqueue(
new MockResponse()
.setResponseCode(testResponseCode)
.setHeader("Content-Type", "application/json")
.setBody(testResponseBodyStr)
)
val client = createClient()
val request = Request.Get("/test")
val responseObjectFuture = client.resultWithDecodedErrorSafe[EmptyBody, FooError, Foo](request)
var responseObject: Either[FooError, Foo] = null
noException shouldBe thrownBy {
responseObject = result { responseObjectFuture }
}
responseObject shouldBe Right(testResponseObject)
}
scenario("return decoded response body [Left[FooError, _]] when response code is unsuccessful.") {
val testResponseCode = 500
val testResponseObject = FooError("test descr")
val testResponseBodyStr = fooErrorEncoder(testResponseObject).toString
mockServer.enqueue(
new MockResponse()
.setResponseCode(testResponseCode)
.setHeader("Content-Type", "application/json")
.setBody(testResponseBodyStr)
)
val client = createClient()
val request = Request.Get("/test")
val responseObjectFuture = client.resultWithDecodedErrorSafe[EmptyBody, FooError, Foo](request)
var responseObject: Either[FooError, Foo] = null
noException shouldBe thrownBy {
responseObject = result { responseObjectFuture }
}
responseObject shouldBe Left(testResponseObject)
}
scenario("should execute upload request and call progress callback when server is responding.") {
val testResponseCode = 201
val testRequestBody = Array.fill[Byte](100000)(1)
mockServer.enqueue(
new MockResponse()
.setResponseCode(testResponseCode)
.setHeader("Content-Type", "application/octet-stream")
.setBody("we do not care")
)
val client = createClient()
val request = Request.Post("/test", body = testRequestBody)
val progressAcc = ArrayBuffer.empty[Progress]
val callback = new ProgressCallback {
override def updated(progress: Long, total: Option[Long]): Unit = progressAcc.append(Progress(progress, total))
}
noException shouldBe thrownBy {
await { client.result[Array[Byte], Response[String]](request, uploadCallback = Some(callback)) }
}
checkProgressSequence(
progressAcc.toList,
contentLength = testRequestBody.length
)
}
scenario("should execute download request and call progress callback when server is responding.") {
val testResponseCode = 200
val testResponseBody = Array.fill[Byte](100000)(1)
val buffer = new Buffer()
buffer.writeAll(Okio.source(new ByteArrayInputStream(testResponseBody)))
mockServer.enqueue(
new MockResponse()
.setResponseCode(testResponseCode)
.setHeader("Content-Type", "application/octet-stream")
.setBody(buffer)
)
val client = createClient()
val request = Request.Get("/test")
implicit val deserializer: RawBodyDeserializer[File] = tempFileBodyDeserializer
val progressAcc = ArrayBuffer.empty[Progress]
val callback = new ProgressCallback {
override def updated(progress: Long, total: Option[Long]): Unit = progressAcc.append(Progress(progress, total))
}
noException shouldBe thrownBy {
await { client.result[EmptyBody, Response[File]](request, downloadCallback = Some(callback)) }
}
checkProgressSequence(
progressAcc.toList,
contentLength = testResponseBody.length
)
}
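  // The helper below asserts that progress starts at 0, finishes completed, never
  // exceeds the reported total, and strictly increases between consecutive callbacks.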
def checkProgressSequence(list: List[Progress], contentLength: Long): Unit =
withClue(s"Progress sequence: $list") {
list.head.progress shouldBe 0
list.last.isCompleted shouldBe true
list foreach { p => p.progress should be <= p.total.getOrElse(0L) }
list zip list.tail foreach { case (prev, curr) => prev.progress should be < curr.progress }
}
}
| wireapp/wire-android-sync-engine | zmessaging/src/test/scala/com/waz/znet2/HttpClientSpec.scala | Scala | gpl-3.0 | 9,618 |
package scala.slick.jdbc.meta
import java.sql._
import scala.slick.jdbc.ResultSetInvoker
import scala.slick.ql.TypeMapperDelegate
/**
* A wrapper for a row in the ResultSet returned by DatabaseMetaData.getVersionColumns().
*/
case class MVersionColumn(
column: String, sqlType: Int, typeName: String,
columnSize: Option[Int], bufferLength: Int, decimalDigits: Option[Int], pseudoColumn: Option[Boolean]) {
def sqlTypeName = TypeMapperDelegate.typeNames.get(sqlType)
}
object MVersionColumn {
def getVersionColumns(table: MQName) = ResultSetInvoker[MVersionColumn](
_.metaData.getVersionColumns(table.catalog_?, table.schema_?, table.name) ) { r =>
MVersionColumn(r.skip<<, r<<, r<<, r<<, r<<, r<<, r.nextInt match {
case DatabaseMetaData.versionColumnPseudo => Some(true)
case DatabaseMetaData.versionColumnNotPseudo => Some(false)
case _ => None
})
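  // pseudoColumn above is Some(true) / Some(false) for the JDBC pseudo / not-pseudo
  // constants, and None when the column's pseudo status is unknown.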
}
}
| szeiger/scala-query | src/main/scala/scala/slick/jdbc/meta/MVersionColumn.scala | Scala | bsd-2-clause | 920 |
package basics.patterns
object PatternMatching {
  def doMatchingValue(value: Int): String = {
value match {
case 1 => "one"
case 2 => "two"
case _ => "unknown"
}
}
  def doMatchingType(value: Any): String = {
value match {
case _: Int => "int"
case _: Double => "double"
case _: String => "string"
case _ => "unknown"
}
}
def doMatchingCaseClass(value: MatchedClass): String = {
value match {
case MatchedClass(1) => "1"
case MatchedClass(2) => "2"
case _ => "other"
}
}
def doMatchingFunction(value: (String => Int)): String = {
value match {
case _: (String => Int) => "String to Int"
case _ => "other"
}
}
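  // Caveat: the parameter is already typed as String => Int, so the first case above
  // always matches; with an Any parameter, type erasure would still match any Function1.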
  def doListMatch(value: List[_]): String = {
    value match {
      case List(_, 0, 1, _) => "Found"
case _ => "Not found"
}
}
} | szaqal/KitchenSink | Scala/01/src/main/scala/basics/patterns/PatternMatching.scala | Scala | gpl-3.0 | 874 |
package is.hail.expr.ir.lowering
import is.hail.backend.ExecuteContext
import is.hail.expr.ir.{BaseIR, TypeCheck}
import is.hail.utils._
case class LoweringPipeline(lowerings: LoweringPass*) {
assert(lowerings.nonEmpty)
final def apply(ctx: ExecuteContext, ir: BaseIR): BaseIR = {
var x = ir
lowerings.foreach { l =>
try {
x = l.apply(ctx, x)
} catch {
case e: Throwable =>
log.error(s"error while applying lowering '${ l.context }'")
throw e
}
try {
TypeCheck(x)
} catch {
case e: Throwable =>
fatal(s"error after applying ${ l.context }", e)
}
}
x
}
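  // e.g. LoweringPipeline.relationalLowerer(optimize = true).apply(ctx, ir) runs each
  // pass in order, type-checking the intermediate IR after every pass.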
def noOptimization(): LoweringPipeline = LoweringPipeline(lowerings.filter(l => !l.isInstanceOf[OptimizePass]): _*)
}
object LoweringPipeline {
private val _relationalLowerer = LoweringPipeline(
OptimizePass("relationalLowerer, initial IR"),
LowerMatrixToTablePass,
OptimizePass("relationalLowerer, after LowerMatrixToTable"),
LiftRelationalValuesToRelationalLets,
LowerOrInterpretNonCompilablePass,
OptimizePass("relationalLowerer, after InterpretNonCompilable"))
private val _relationalLowererNoOpt = _relationalLowerer.noOptimization()
private val _legacyRelationalLowerer = LoweringPipeline(
OptimizePass("legacyRelationalLowerer, initial IR"),
LowerMatrixToTablePass,
OptimizePass("legacyRelationalLowerer, after LowerMatrixToTable"),
LegacyInterpretNonCompilablePass,
OptimizePass("legacyRelationalLowerer, after LegacyInterpretNonCompilable")
)
private val _legacyRelationalLowererNoOpt = _legacyRelationalLowerer.noOptimization()
private val _compileLowerer = LoweringPipeline(
OptimizePass("compileLowerer, initial IR"),
InlineApplyIR,
OptimizePass("compileLowerer, after InlineApplyIR"),
LowerArrayAggsToRunAggsPass,
OptimizePass("compileLowerer, after LowerArrayAggsToRunAggs")
)
private val _compileLowererNoOpt = _compileLowerer.noOptimization()
private val _dArrayLowerers = Map(
DArrayLowering.All -> LoweringPipeline(
OptimizePass("darrayLowerer, initial IR"),
LowerMatrixToTablePass,
OptimizePass("darrayLowerer, after LowerMatrixToTable"),
LiftRelationalValuesToRelationalLets,
LowerToDistributedArrayPass(DArrayLowering.All),
OptimizePass("darrayLowerer, after LowerToCDA")
),
DArrayLowering.TableOnly -> LoweringPipeline(
OptimizePass("darrayLowerer, initial IR"),
LowerMatrixToTablePass,
OptimizePass("darrayLowerer, after LowerMatrixToTable"),
LiftRelationalValuesToRelationalLets,
LowerToDistributedArrayPass(DArrayLowering.TableOnly),
OptimizePass("darrayLowerer, after LowerToCDA")
),
DArrayLowering.BMOnly -> LoweringPipeline(
OptimizePass("darrayLowerer, initial IR"),
LowerMatrixToTablePass,
OptimizePass("darrayLowerer, after LowerMatrixToTable"),
LiftRelationalValuesToRelationalLets,
LowerToDistributedArrayPass(DArrayLowering.BMOnly),
OptimizePass("darrayLowerer, after LowerToCDA")
))
private val _dArrayLowerersNoOpt = _dArrayLowerers.mapValues(_.noOptimization()).toMap
def relationalLowerer(optimize: Boolean): LoweringPipeline = if (optimize) _relationalLowerer else _relationalLowererNoOpt
def legacyRelationalLowerer(optimize: Boolean): LoweringPipeline =
if (optimize) _legacyRelationalLowerer else _legacyRelationalLowererNoOpt
def darrayLowerer(optimize: Boolean): Map[DArrayLowering.Type, LoweringPipeline] = if (optimize) _dArrayLowerers else _dArrayLowerersNoOpt
def compileLowerer(optimize: Boolean): LoweringPipeline =
if (optimize) _compileLowerer else _compileLowererNoOpt
}
| hail-is/hail | hail/src/main/scala/is/hail/expr/ir/lowering/LoweringPipeline.scala | Scala | mit | 3,746 |
package helpers
import play.api.Application
import scala.reflect.ClassTag
import play.api.inject.guice.GuiceApplicationBuilder
import play.api.test.Helpers._
trait InjectorSupport {
Option(System.getenv("GECKO_DRIVER_PATH")) match {
case None => println(
"*** You do not set environment variable GECKO_DRIVER_PATH. You cannot use FIREFOX for test ***"
)
case Some(p) => System.setProperty("webdriver.gecko.driver", p)
}
Option(System.getenv("CHROME_DRIVER_PATH")) match {
case None => println(
"*** You do not set environment variable CHROME_DRIVER_PATH. You cannot use CHROME for test ***"
)
case Some(p) => System.setProperty("webdriver.chrome.driver", p)
}
def appl(conf: Map[String, Any] = inMemoryDatabase()): Application = GuiceApplicationBuilder().configure(conf).build()
def inject[T](implicit app: Application, c: ClassTag[T]): T = app.injector.instanceOf[T]
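  // Typical usage in a test (MyService is a hypothetical binding):
  //   implicit val app: Application = appl()
  //   val service = inject[MyService]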
}
| ruimo/store2 | test/helpers/InjectorSupport.scala | Scala | apache-2.0 | 923 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ui.jobs
import javax.servlet.http.HttpServletRequest
import scala.xml.{Attribute, Elem, Node, NodeSeq, Null, Text}
import org.apache.spark.scheduler.Schedulable
import org.apache.spark.status.{AppSummary, PoolData}
import org.apache.spark.status.api.v1.{StageData, StageStatus}
import org.apache.spark.ui.{UIUtils, WebUIPage}
/** Page showing list of all ongoing and recently finished stages and pools */
private[ui] class AllStagesPage(parent: StagesTab) extends WebUIPage("") {
private val sc = parent.sc
private val subPath = "stages"
private def isFairScheduler = parent.isFairScheduler
def render(request: HttpServletRequest): Seq[Node] = {
// For now, pool information is only accessible in live UIs
val pools = sc.map(_.getAllPools).getOrElse(Seq.empty[Schedulable]).map { pool =>
val uiPool = parent.store.asOption(parent.store.pool(pool.name)).getOrElse(
new PoolData(pool.name, Set()))
pool -> uiPool
}.toMap
val poolTable = new PoolTable(pools, parent)
val allStatuses = Seq(StageStatus.ACTIVE, StageStatus.PENDING, StageStatus.COMPLETE,
StageStatus.SKIPPED, StageStatus.FAILED)
val allStages = parent.store.stageList(null)
val appSummary = parent.store.appSummary()
val (summaries, tables) = allStatuses.map(
summaryAndTableForStatus(allStages, appSummary, _, request)).unzip
val summary: NodeSeq =
<div>
<ul class="unstyled">
{summaries.flatten}
</ul>
</div>
val poolsDescription = if (sc.isDefined && isFairScheduler) {
<span class="collapse-aggregated-poolTable collapse-table"
onClick="collapseTable('collapse-aggregated-poolTable','aggregated-poolTable')">
<h4>
<span class="collapse-table-arrow arrow-open"></span>
<a>Fair Scheduler Pools ({pools.size})</a>
</h4>
</span> ++
<div class="aggregated-poolTable collapsible-table">
{poolTable.toNodeSeq}
</div>
} else {
Seq.empty[Node]
}
val content = summary ++ poolsDescription ++ tables.flatten.flatten
UIUtils.headerSparkPage("Stages for All Jobs", content, parent)
}
private def summaryAndTableForStatus(
allStages: Seq[StageData],
appSummary: AppSummary,
status: StageStatus,
request: HttpServletRequest): (Option[Elem], Option[NodeSeq]) = {
val stages = if (status == StageStatus.FAILED) {
allStages.filter(_.status == status).reverse
} else {
allStages.filter(_.status == status)
}
if (stages.isEmpty) {
(None, None)
} else {
val killEnabled = status == StageStatus.ACTIVE && parent.killEnabled
val isFailedStage = status == StageStatus.FAILED
val stagesTable =
new StageTableBase(parent.store, request, stages, statusName(status), stageTag(status),
parent.basePath, subPath, parent.isFairScheduler, killEnabled, isFailedStage)
val stagesSize = stages.size
(Some(summary(appSummary, status, stagesSize)),
Some(table(appSummary, status, stagesTable, stagesSize)))
}
}
private def statusName(status: StageStatus): String = status match {
case StageStatus.ACTIVE => "active"
case StageStatus.COMPLETE => "completed"
case StageStatus.FAILED => "failed"
case StageStatus.PENDING => "pending"
case StageStatus.SKIPPED => "skipped"
}
private def stageTag(status: StageStatus): String = s"${statusName(status)}Stage"
private def headerDescription(status: StageStatus): String = statusName(status).capitalize
private def summaryContent(appSummary: AppSummary, status: StageStatus, size: Int): String = {
if (status == StageStatus.COMPLETE && appSummary.numCompletedStages != size) {
s"${appSummary.numCompletedStages}, only showing $size"
} else {
s"$size"
}
}
private def summary(appSummary: AppSummary, status: StageStatus, size: Int): Elem = {
val summary =
<li>
<a href={s"#${statusName(status)}"}>
<strong>{headerDescription(status)} Stages:</strong>
</a>
{summaryContent(appSummary, status, size)}
</li>
if (status == StageStatus.COMPLETE) {
summary % Attribute(None, "id", Text("completed-summary"), Null)
} else {
summary
}
}
private def table(
appSummary: AppSummary,
status: StageStatus,
stagesTable: StageTableBase,
size: Int): NodeSeq = {
val classSuffix = s"${statusName(status).capitalize}Stages"
<span id={statusName(status)}
class={s"collapse-aggregated-all$classSuffix collapse-table"}
onClick={s"collapseTable('collapse-aggregated-all$classSuffix'," +
s" 'aggregated-all$classSuffix')"}>
<h4>
<span class="collapse-table-arrow arrow-open"></span>
<a>{headerDescription(status)} Stages ({summaryContent(appSummary, status, size)})</a>
</h4>
</span> ++
<div class={s"aggregated-all$classSuffix collapsible-table"}>
{stagesTable.toNodeSeq}
</div>
}
}
| brad-kaiser/spark | core/src/main/scala/org/apache/spark/ui/jobs/AllStagesPage.scala | Scala | apache-2.0 | 5,904 |
package japgolly.scalajs.react.extra.router2
import org.scalajs.dom
import scalaz.Equal
import japgolly.scalajs.react._
import japgolly.scalajs.react.extra.assertWarn
// =====================================================================================================================
// URLs
abstract class PathLike[Self <: PathLike[Self]] {
this: Self =>
protected def make(s: String): Self
protected def str(s: Self): String
final def map(f: String => String): Self = make(f(str(this)))
final def +(p: String) : Self = map(_ + p)
final def +(p: Self) : Self = this + str(p)
final def /(p: String) : Self = endWith_/ + p
final def /(p: Self) : Self = this / str(p)
final def endWith_/ : Self = map(_.replaceFirst("/*$", "/"))
final def rtrim_/ : Self = map(_.replaceFirst("/+$", ""))
final def isEmpty : Boolean = str(this).isEmpty
final def nonEmpty : Boolean = str(this).nonEmpty
}
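// Hedged usage sketch (added for illustration; not part of the upstream
// library — the object name is introduced here). Shows how the PathLike
// combinators compose, using the Path type defined later in this file.
private[router2] object PathLikeUsageExample {
  // `+` concatenates verbatim; `/` first normalises to a single trailing slash:
  val plain : Path = Path("api") + "v1"       // Path("apiv1")
  val nested: Path = Path("api") / "v1"       // Path("api/v1")
  // endWith_/ collapses any run of trailing slashes to exactly one,
  // while rtrim_/ removes them all:
  val slash : Path = Path("api//").endWith_/  // Path("api/")
  val bare  : Path = Path("api//").rtrim_/    // Path("api")
}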
/**
* The prefix of all routes on a page.
*
* The router expects this to be a full URL.
* Examples: `BaseUrl("http://www.blah.com/hello")`, `BaseUrl.fromWindowOrigin / "hello"`.
*/
final case class BaseUrl(value: String) extends PathLike[BaseUrl] {
assertWarn(value contains "://", s"$this doesn't seem to be a valid URL. It's missing '://'. Consider using BaseUrl.fromWindowOrigin.")
override protected def make(s: String) = BaseUrl(s)
override protected def str(s: BaseUrl) = s.value
def apply(p: Path): AbsUrl = AbsUrl(value + p.value)
def abs : AbsUrl = AbsUrl(value)
}
object BaseUrl {
implicit def equality: Equal[BaseUrl] = Equal.equalA
def fromWindowOrigin = BaseUrl(dom.window.location.origin)
def fromWindowOrigin_/ = fromWindowOrigin.endWith_/
}
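// Hedged sketch (added for illustration; the URL literal is an assumption).
// Combines BaseUrl and Path in the way the scaladoc above describes.
private[router2] object BaseUrlUsageExample {
  val base: BaseUrl = BaseUrl("http://www.blah.com") / "hello" // BaseUrl("http://www.blah.com/hello")
  val abs : AbsUrl  = base(Path("/users"))                     // AbsUrl("http://www.blah.com/hello/users")
}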
/**
* The portion of the URL after the [[BaseUrl]].
*/
final case class Path(value: String) extends PathLike[Path] {
override protected def make(s: String) = Path(s)
override protected def str(s: Path) = s.value
def abs(implicit base: BaseUrl): AbsUrl = base apply this
/**
* Attempts to remove an exact prefix and return a non-empty suffix.
*/
def removePrefix(prefix: String): Option[Path] = {
val l = prefix.length
if (value.length > l && value.startsWith(prefix))
Some(Path(value substring l))
else
None
}
}
object Path {
implicit val equality: Equal[Path] = Equal.equalA
def root = Path("")
}
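// Hedged sketch (added for illustration; not upstream code). removePrefix
// succeeds only when the exact prefix matches AND a non-empty suffix remains:
private[router2] object RemovePrefixExample {
  val hit : Option[Path] = Path("user/7").removePrefix("user/") // Some(Path("7"))
  val miss: Option[Path] = Path("user/").removePrefix("user/")  // None: the suffix would be empty
}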
/**
* An absolute URL.
*/
final case class AbsUrl(value: String) extends PathLike[AbsUrl] {
assertWarn(value contains "://", s"$this doesn't seem to be a valid URL. It's missing '://'. Consider using AbsUrl.fromWindow.")
override protected def make(s: String) = AbsUrl(s)
override protected def str(s: AbsUrl) = s.value
}
object AbsUrl {
implicit def equality: Equal[AbsUrl] = Equal.equalA
def fromWindow = AbsUrl(dom.window.location.href)
}
// =====================================================================================================================
// Actions
// If we don't extend Product with Serializable here, a method that returns both a Renderer[P] and a Redirect[P] will
// be type-inferred to "Product with Serializable with Action[P]" which breaks the Renderable & Actionable implicits.
sealed trait Action[P] extends Product with Serializable {
def map[A](f: P => A): Action[A]
}
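// Hedged illustration (added; not upstream code) of the inference issue the
// comment above describes: because Action already extends Product with
// Serializable, the least upper bound of a Renderer[P] and a Redirect[P] is
// inferred as plain Action[P].
private[router2] object ActionInferenceExample {
  def pick[P](r: Renderer[P], d: Redirect[P], useRenderer: Boolean) =
    if (useRenderer) r else d // inferred result type: Action[P]
}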
final case class Renderer[P](f: RouterCtl[P] => ReactElement) extends Action[P] {
@inline def apply(ctl: RouterCtl[P]) = f(ctl)
override def map[A](g: P => A): Renderer[A] =
Renderer(r => f(r contramap g))
}
sealed trait Redirect[P] extends Action[P] {
override def map[A](f: P => A): Redirect[A]
}
object Redirect {
sealed trait Method
/** The current URL will not be recorded in history. User can't hit ''Back'' button to reach it. */
case object Replace extends Method
/** The current URL will be recorded in history. User can hit ''Back'' button to reach it. */
case object Push extends Method
}
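// Hedged usage sketch (added; the String page type is an assumption). Push
// records the current URL in history so Back can return to it; Replace does not.
private[router2] object RedirectExamples {
  val viaHistory: Redirect[String] = RedirectToPage("home", Redirect.Push)
  val silently  : Redirect[String] = RedirectToPath[String](Path("login"), Redirect.Replace)
}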
final case class RedirectToPage[P](page: P, method: Redirect.Method) extends Redirect[P] {
override def map[A](f: P => A): RedirectToPage[A] =
RedirectToPage(f(page), method)
}
final case class RedirectToPath[P](path: Path, method: Redirect.Method) extends Redirect[P] {
override def map[A](f: P => A): RedirectToPath[A] =
RedirectToPath(path, method)
}
// =====================================================================================================================
// Other
/**
* Result of the router resolving a URL and reaching a conclusion about what to render.
*
* @param page Data representation (or command) of what will be drawn.
* @param render The render function provided by the rules and logic in [[RouterConfig]].
*/
final case class Resolution[P](page: P, render: () => ReactElement)
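// Note (added for clarity): `render` is a thunk, so constructing a Resolution
// does not build the ReactElement; the element is produced only when
// `render()` is invoked.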
| beni55/scalajs-react | extra/src/main/scala/japgolly/scalajs/react/extra/router2/types.scala | Scala | apache-2.0 | 4,867 |
import java.io.File
import testgen.TestSuiteBuilder._
import testgen._
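/** (Descriptive comment added; behaviour inferred from the code below.)
  * Builds the Scala test suite for the high-scores exercise from its JSON
  * definition file and prints the generated source between separator lines.
  */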
object HighScoresTestGenerator {
def main(args: Array[String]): Unit = {
val file = new File("src/main/resources/high-scores.json")
val code = TestSuiteBuilder.build(file,
fromLabeledTestAltFromInput(
"scores" -> Seq("scores"),
"latest" -> Seq("scores"),
"personalBest" -> Seq("scores"),
"personalTop" -> Seq("scores"),
"report" -> Seq("scores")))
println(s"-------------")
println(code)
println(s"-------------")
}
}
| ricemery/xscala | testgen/src/main/scala/HighScoresTestGenerator.scala | Scala | mit | 561 |
/*
* Copyright (c) 2017. Yuriy Stul
*/
package com.stulsoft.typeinfo
import org.scalatest.{FlatSpec, Matchers}
/** Unit tests for YSTypeInfo class
*
* @author Yuriy Stul
*/
class YSTypeInfoTest extends FlatSpec with Matchers {
behavior of "YSTypeInfoTest"
"declaredFields" should "return declared fields" in {
class C1(val fieldOne: String, val fieldTwo: Int) extends YSTypeInfo
val fields = new C1("test1", 2).declaredFields
fields.length shouldBe 2
fields.exists(f => f.getName == "fieldOne") shouldBe true
fields.exists(f => f.getName == "fieldTwo") shouldBe true
}
it should "work with case class" in {
case class C1(fieldOne: String, fieldTwo: Int) extends YSTypeInfo
val fields = C1("test1", 2).declaredFields
fields.length shouldBe 2
fields.exists(f => f.getName == "fieldOne") shouldBe true
fields.exists(f => f.getName == "fieldTwo") shouldBe true
}
it should "work with inheritance" in {
class C1(val fieldOne: String, val fieldTwo: Int)
class C2(fieldOne: String, fieldTwo: Int, val fieldThree: Double)
extends C1(fieldOne = fieldOne, fieldTwo = fieldTwo)
with YSTypeInfo
val fields = new C2("one", 2, 3.0).declaredFields
fields.length shouldBe 1
fields.exists(f => f.getName == "fieldThree") shouldBe true
}
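  // Note (added for clarity): as the test above demonstrates, declaredFields
  // reports only the fields declared directly on the runtime class; the
  // fields inherited from C1 are excluded, so only fieldThree is found.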
"declaredMethods" should "return declared methods" in {
class C1(val fieldOne: String, val fieldTwo: Int) extends YSTypeInfo {
def multiple(multiplier: Int): Int = {
fieldTwo * multiplier
}
}
val methods = new C1("field one", 123).declaredMethods
methods.length shouldBe 1
methods.exists(f => f.getName == "multiple") shouldBe true
}
it should "work with inheritance 1" in {
class C1(val fieldOne: String)
class C2(fieldOne: String, val fieldTwo: Int) extends C1(fieldOne) with YSTypeInfo {
def multiple(multiplier: Int): Int = {
fieldTwo * multiplier
}
}
val methods = new C2("field one", 123).declaredMethods
methods.length shouldBe 1
methods.exists(f => f.getName == "multiple") shouldBe true
}
it should "work with inheritance 2" in {
class C1(val fieldOne: String) {
def lengthCalculator: Int = fieldOne.length
}
class C2(fieldOne: String, val fieldTwo: Int) extends C1(fieldOne) with YSTypeInfo {
def multiple(multiplier: Int): Int = {
fieldTwo * multiplier
}
}
val methods = new C2("field one", 123).declaredMethods
methods.length shouldBe 1
methods.exists(f => f.getName == "multiple") shouldBe true
}
}
| ysden123/poc | ys-type-info/src/test/scala/com/stulsoft/typeinfo/YSTypeInfoTest.scala | Scala | mit | 2,597 |