code | repo_name | path | language | license | size
---|---|---|---|---|---
/*
* Copyright (C) 2015 Cotiviti Labs ([email protected])
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.signalcollect.triplerush.loading
import org.scalatest.fixture.{ FlatSpec, UnitFixture }
import com.signalcollect.triplerush.{ GroundTruthSpec, TestStore }
class ParsingErrorSpec extends FlatSpec with UnitFixture {
"TripleIterator" should "throw an error when the file does not exist" in new TestStore {
    val resource = "does-not-exist.nt"
val tripleStream = classOf[GroundTruthSpec].getResourceAsStream(resource)
intercept[NullPointerException] {
tr.addTriples(TripleIterator(tripleStream))
}
}
}
| uzh/triplerush | src/test/scala/com/signalcollect/triplerush/loading/ParsingErrorSpec.scala | Scala | apache-2.0 | 1,381 |
/***********************************************************************
* Copyright (c) 2013-2022 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.utils.geotools
import java.awt.image.BufferedImage
import com.typesafe.scalalogging.LazyLogging
import org.locationtech.jts.geom._
import scala.annotation.tailrec
import scala.util.control.NonFatal
/**
* Renders geometries to a fixed-size grid of pixels
*
* @param env the rendering envelope
* @param xSize x pixel count
* @param ySize y pixel count
*/
class RenderingGrid(env: Envelope, xSize: Int, ySize: Int) extends LazyLogging {
private val grid = new GridSnap(env, xSize, ySize)
private val pixels = scala.collection.mutable.Map.empty[(Int, Int), Double].withDefaultValue(0d)
private val xMin = env.getMinX
private val xMax = env.getMaxX
private val wide = xMax - xMin > 360d
private var count = 0L
/**
* Render a point
*
* @param point geometry
* @param weight weight
*/
def render(point: Point, weight: Double): Unit = {
val j = grid.j(point.getY)
if (j != -1) {
translate(point.getX).foreach(i => pixels(i, j) += weight)
}
count += 1
}
/**
* Render a multi-point
*
* @param multiPoint geometry
* @param weight weight
*/
def render(multiPoint: MultiPoint, weight: Double): Unit = {
var i = 0
while (i < multiPoint.getNumGeometries) {
render(multiPoint.getGeometryN(i).asInstanceOf[Point], weight)
i += 1
}
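    // each render(point) call above incremented count, so adjust the total here to
    // count the whole multi-point as a single feature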
count += (1 - i)
}
/**
* Render a line string
*
* @param lineString geometry
* @param weight weight
*/
def render(lineString: LineString, weight: Double): Unit = {
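    // Summary of the loop below (comment added for clarity): each consecutive pair of
    // coordinates is rasterized with Bresenham's line algorithm when both endpoints
    // snap onto the grid; segments that leave the grid are first clipped against the
    // envelope and the clipped geometry is re-rendered.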
if (lineString.getNumPoints > 0) {
var iN, jN = -1 // track the last pixel we've written to avoid double-counting
// our working coordinates
var p0: Coordinate = null
var i0: Seq[Int] = null
var j0: Int = -1
var p1 = lineString.getCoordinateN(0)
var i1 = translate(p1.x)
var j1 = grid.j(p1.y)
var n = 1
while (n < lineString.getNumPoints) {
// increment to the next pair of points
p0 = p1
i0 = i1
j0 = j1
p1 = lineString.getCoordinateN(n)
i1 = translate(p1.x)
j1 = grid.j(p1.y)
if (i0.isEmpty || j0 == -1 || i1.isEmpty || j1 == -1) {
// line is not entirely contained in the grid region
// find the intersection of the line segment with the grid region
try {
val intersection = GeometryUtils.geoFactory.createLineString(Array(p0, p1)).intersection(grid.envelope)
if (!intersection.isEmpty) {
render(intersection, weight)
count -= 1 // don't double count
}
} catch {
case NonFatal(e) => logger.error(s"Error intersecting line string [$p0 $p1] with ${grid.envelope}", e)
}
} else {
val bresenham = grid.bresenhamLine(i0.head, j0, i1.head, j1)
// check the first point for overlap with last line segment
val (iF, jF) = bresenham.next
if (iF != iN || jF != jN) {
pixels(iF, jF) += weight
i0.tail.foreach { i0N =>
pixels(iF - i0.head + i0N, jF) += weight
}
}
if (!bresenham.hasNext) {
iN = iF
jN = jF
} else {
@tailrec
def writeNext(): Unit = {
val (i, j) = bresenham.next
pixels(i, j) += weight
i0.tail.foreach { i0N =>
pixels(i - i0.head + i0N, j) += weight
}
if (bresenham.hasNext) {
writeNext()
} else {
iN = i
jN = j
}
}
writeNext()
}
}
n += 1
}
}
count += 1
}
/**
* Render a multi-line
*
* @param multiLineString geometry
* @param weight weight
*/
def render(multiLineString: MultiLineString, weight: Double): Unit = {
var i = 0
while (i < multiLineString.getNumGeometries) {
render(multiLineString.getGeometryN(i).asInstanceOf[LineString], weight)
i += 1
}
count += (1 - i)
}
/**
* Render a polygon
*
* @param polygon geometry
* @param weight weight
*/
def render(polygon: Polygon, weight: Double): Unit = {
val envelope = polygon.getEnvelopeInternal
val imins = translate(envelope.getMinX)
val imaxes = translate(envelope.getMaxX)
val jmin = grid.j(envelope.getMinY)
val jmax = grid.j(envelope.getMaxY)
if (imins.isEmpty || imaxes.isEmpty || jmin == -1 || jmax == -1) {
// polygon is not entirely contained in the grid region
// find the intersection of the polygon with the grid region
try {
val intersection = polygon.intersection(grid.envelope)
if (!intersection.isEmpty) {
render(intersection, weight)
count -= 1 // don't double count
}
} catch {
case NonFatal(e) => logger.error(s"Error intersecting polygon [$polygon] with ${grid.envelope}", e)
}
} else {
val imin = imins.head
val iLength = imaxes.head - imin + 1
val jLength = jmax - jmin + 1
val raster = {
// use java awt graphics to draw our polygon on the grid
val image = new BufferedImage(iLength, jLength, BufferedImage.TYPE_BYTE_BINARY)
val graphics = image.createGraphics()
val border = polygon.getExteriorRing
val xPoints = Array.ofDim[Int](border.getNumPoints)
val yPoints = Array.ofDim[Int](border.getNumPoints)
var i = 0
while (i < xPoints.length) {
val coord = border.getCoordinateN(i)
xPoints(i) = translate(coord.x).head - imin
yPoints(i) = grid.j(coord.y) - jmin
i += 1
}
graphics.fillPolygon(xPoints, yPoints, xPoints.length)
image.getRaster
}
var i, j = 0
while (i < iLength) {
while (j < jLength) {
if (raster.getSample(i, j, 0) != 0) {
imins.foreach(im => pixels(i + im, j + jmin) += weight)
}
j += 1
}
j = 0
i += 1
}
}
count += 1
}
/**
* Render a multi-polygon
*
* @param multiPolygon geometry
* @param weight weight
*/
def render(multiPolygon: MultiPolygon, weight: Double): Unit = {
var i = 0
while (i < multiPolygon.getNumGeometries) {
render(multiPolygon.getGeometryN(i).asInstanceOf[Polygon], weight)
i += 1
}
count += (1 - i)
}
/**
* Render an arbitrary geometry
*
* @param geometry geometry
* @param weight weight
*/
def render(geometry: Geometry, weight: Double): Unit = {
geometry match {
case g: Point => render(g, weight)
case g: LineString => render(g, weight)
case g: Polygon => render(g, weight)
case g: MultiPoint => render(g, weight)
case g: MultiLineString => render(g, weight)
case g: MultiPolygon => render(g, weight)
case g: GeometryCollection =>
var i = 0
while (i < g.getNumGeometries) {
render(g.getGeometryN(i), weight)
i += 1
}
count += (1 - i)
case _ => throw new NotImplementedError(s"Unexpected geometry type: $geometry")
}
}
/**
* Have any pixels been rendered?
*
* @return
*/
def isEmpty: Boolean = pixels.isEmpty
/**
* Number of features rendered in this grid (not accounting for weights).
*
* May not be exact - features that are outside the grid envelope will still be counted.
*
* @return
*/
def size: Long = count
/**
* Pixel weights
*
* @return
*/
def iterator: Iterator[((Int, Int), Double)] = pixels.iterator
/**
* Clear any rendered pixels
*/
def clear(): Unit = pixels.clear()
/**
* Translate a point into the output envelope. If the envelope is larger than 360 degrees, points may
* be rendered more than once
*
* @param x longitude
* @return
*/
private def translate(x: Double): Seq[Int] = {
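    // Worked example (illustrative): with xMin = -180 and xMax = 180, x = 190 takes
    // the x > xMax branch and becomes 190 - 360 * ceil((190 - 180) / 360) = -170;
    // values that cannot be shifted into [xMin, xMax] yield Seq.empty and are skipped.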
if (x < xMin) {
val xt = x + (360d * math.ceil((xMin - x) / 360))
if (xt > xMax) { Seq.empty } else { widen(xt) }
} else if (x > xMax) {
val xt = x - (360d * math.ceil((x - xMax) / 360))
if (xt < xMin) { Seq.empty } else { widen(xt) }
} else {
widen(x)
}
}
/**
* Returns any pixels that should be rendered for the given point
*
* @param x longitude
* @return
*/
private def widen(x: Double): Seq[Int] = {
if (wide) {
val seq = Seq.newBuilder[Int]
// start with the smallest x value greater than xMin
var xup = x - 360d * math.floor((x - xMin) / 360)
while (xup <= xMax) {
seq += grid.i(xup)
xup += 360d
}
seq.result
} else {
Seq(grid.i(x))
}
}
}
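
// A minimal usage sketch (added for illustration, not part of the original source).
// It assumes GeometryUtils.geoFactory from this package, as used above; the envelope
// bounds and pixel counts are arbitrary example values.
object RenderingGridUsageSketch {
  def main(args: Array[String]): Unit = {
    val factory = GeometryUtils.geoFactory
    // whole-world envelope snapped onto a 360 x 180 pixel grid
    val grid = new RenderingGrid(new Envelope(-180d, 180d, -90d, 90d), 360, 180)
    grid.render(factory.createPoint(new Coordinate(0, 0)), 1.0)
    grid.render(factory.createLineString(Array(new Coordinate(-10, 0), new Coordinate(10, 0))), 0.5)
    // iterator yields ((i, j), weight) for every pixel touched by a rendered geometry
    grid.iterator.take(5).foreach { case ((i, j), w) => println(s"($i, $j) -> $w") }
  }
}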
| locationtech/geomesa | geomesa-utils/src/main/scala/org/locationtech/geomesa/utils/geotools/RenderingGrid.scala | Scala | apache-2.0 | 9,414 |
import scala.quoted.*
inline def transform(using dummyImplicit: DummyImplicit): Unit =
${ transformImpl } // error
def transformImpl(using dummyImplicit: DummyImplicit)(using Quotes): Expr[Unit] = ???
| dotty-staging/dotty | tests/neg-macros/i7839.scala | Scala | apache-2.0 | 205 |
package chandu0101.scalajs.react.components.demo.components.reactpopover
import chandu0101.scalajs.react.components.demo.components.CodeExample
import chandu0101.scalajs.react.components.demo.components.demo.LocalDemoButton
import chandu0101.scalajs.react.components.popovers.ReactPopOver
import japgolly.scalajs.react._
import japgolly.scalajs.react.vdom.prefix_<^._
/**
 * Created by chandrasekharkode.
*/
object ReactPopoverDemo {
val code =
"""
| ReactPopover(placement = "top", ref = theTopRef)( "I am Top Pop Over" ),
| LocalDemoButton(name = "Top Button" ,onButtonClick = B.onTopButtonClick)
|
| ReactPopover(placement = "left", ref = theLeftRef, title = "Left Title")("I am Left Popover"),
| LocalDemoButton(name = "Left Button" ,onButtonClick = B.onLeftButtonClick)
|
| ReactPopover(ref = theRightRef, title = "Right Title")("I am right Popover" ),
| LocalDemoButton(name = "Right Button" ,onButtonClick = B.onRightButtonClick)
|
| ReactPopover(placement = "bottom", ref = theBottomtRef)("I am bottom Popover" ),
| LocalDemoButton(name = "Bottom Button" ,onButtonClick = B.onBottomButtonClick)
|
""".stripMargin
object Style {
    val popoverExample = Seq(^.display := "flex", ^.flexDirection := "column", ^.alignItems := "center")
}
class Backend(t: BackendScope[_, _]) {
def onRightButtonClick(e: ReactEventH) = {
theRightRef(t).get.backend.toggle(e.target)
}
def onLeftButtonClick(e: ReactEventH) = {
theLeftRef(t).get.backend.toggle(e.target)
}
def onTopButtonClick(e: ReactEventH) = {
theTopRef(t).get.backend.toggle(e.target)
}
def onBottomButtonClick(e: ReactEventH) = {
      theBottomRef(t).get.backend.toggle(e.target)
}
}
val theRightRef = Ref.to(ReactPopOver.component, "theRightRef")
val theLeftRef = Ref.to(ReactPopOver.component, "theLeftRef")
val theTopRef = Ref.to(ReactPopOver.component, "theTopRef")
  val theBottomRef = Ref.to(ReactPopOver.component, "theBottomRef")
val component = ReactComponentB[Unit]("ReactPopoverDemo")
.stateless
.backend(new Backend(_))
.render((P, S, B) => {
<.div(
<.h3("Demo"),
CodeExample(code)(
<.div(Style.popoverExample)(
<.div(^.padding := "20px")(
ReactPopOver(placement = "top", ref = theTopRef)(
"I am Top Pop Over"
),
LocalDemoButton(name = "Top Button" ,onButtonClick = B.onTopButtonClick)
),
<.div(^.padding := "20px")(
ReactPopOver(placement = "left", ref = theLeftRef, title = "Left Title")(
"I am Left Popover"
),
LocalDemoButton(name = "Left Button" ,onButtonClick = B.onLeftButtonClick)
),
<.div(^.padding := "20px")(
ReactPopOver(ref = theRightRef, title = "Right Title")(
"I am right Popover"
),
LocalDemoButton(name = "Right Button" ,onButtonClick = B.onRightButtonClick)
),
<.div(^.padding := "20px")(
ReactPopOver(placement = "bottom", ref = theBottomtRef)(
"I am bottom Popover"
),
LocalDemoButton(name = "Bottom Button" ,onButtonClick = B.onBottomButtonClick)
)
)
)
)
}).buildU
def apply() = component()
}
| coreyauger/scalajs-react-components | demo/src/main/scala/chandu0101/scalajs/react/components/demo/components/reactpopover/ReactPopoverDemo.scala | Scala | apache-2.0 | 3,376 |
/*
* Copyright (c) 2016 SnappyData, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License. See accompanying
* LICENSE file.
*/
package io.snappydata.benchmark.memsql
import java.io.{FileOutputStream, PrintStream}
import java.sql.DriverManager
import java.util.Date
import scala.util.control.NonFatal
object ConcurrentMemsql {
def main(args: Array[String]): Unit = {
val host = args(0)
val port = 3306
val dbName = "TPCH"
val user = "root"
val password = ""
val readerThread = new Thread(new Runnable {
def run() {
Class.forName("com.mysql.jdbc.Driver")
val dbAddress = "jdbc:mysql://" + host + ":" + port + "/"
val conn = DriverManager.getConnection(dbAddress, user, password)
val stmt = conn.createStatement
stmt.execute("USE " + dbName)
val avgFileStream = new FileOutputStream(new java.io.File(s"reader.out"))
val avgPrintStream = new PrintStream(avgFileStream)
for (i <- 1 to 100000) {
          val startTime = System.nanoTime()
// val rs = stmt.executeQuery("select count(*) as counter from PARTSUPP where ps_suppkey = 18692 and Ps_partkey = 7663535; ")
val rs = stmt.executeQuery("select PS_AVAILQTY as counter from PARTSUPP where ps_suppkey = 18692 and PS_partkeY = 653535")
var count = 0
while (rs.next()) {
count = rs.getInt("counter")
//just iterating over result
//count+=1
}
          val timeTakenMicros = (System.nanoTime() - startTime) / 1000
          avgPrintStream.println(s"Total time taken ${timeTakenMicros}us results : $count ${new Date()}")
}
avgPrintStream.close()
}
    })
    readerThread.start()
val writerThread = new Thread(new Runnable {
def run() {
Class.forName("com.mysql.jdbc.Driver")
val dbAddress = "jdbc:mysql://" + host + ":" + port + "/"
val conn = DriverManager.getConnection(dbAddress, user, password)
val stmt = conn.createStatement
stmt.execute("USE " + dbName)
val avgFileStream = new FileOutputStream(new java.io.File(s"writer.out"))
val avgPrintStream = new PrintStream(avgFileStream)
var startCounter = 7653535
avgPrintStream.println(s"insertion started ${new Date()}")
for (i <- 1 to 100000) {
          startCounter += 1
try {
            // val rs = stmt.execute(s"insert into PARTSUPP values ($startCounter, 18692 , 2, 4.11, 'aa') ")
            stmt.execute(s"update PARTSUPP set PS_AVAILQTY = PS_AVAILQTY + 1")
} catch {
case NonFatal(e) => e.printStackTrace(avgPrintStream)
}
}
avgPrintStream.println(s"insertion ended ${new Date()}")
avgPrintStream.close()
}
    })
    writerThread.start()
}
}
| vjr/snappydata | cluster/src/test/scala/io/snappydata/benchmark/memsql/ConcurrentMemsql.scala | Scala | apache-2.0 | 3,371 |
/*
* Copyright 2016 Miroslav Janíček
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.classdump.luna.test.fragments
import org.classdump.luna.test.{FragmentBundle, FragmentExpectations, OneLiners}
import org.classdump.luna.{LuaFormat, Table}
object TableLibFragments extends FragmentBundle with FragmentExpectations with OneLiners {
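  // Hedged reading of the test DSL (inferred from usage in this file): each
  // `program("...")` wraps a Lua chunk, and `succeedsWith`/`failsWith` register the
  // expected return values or error-message fragments, evaluated within the
  // surrounding `in(...)` execution context.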
in(TableContext) {
about("table.concat") {
program("""return table.concat()""") failsWith "" << "bad argument #1 to 'concat' (table expected, got no value)"
program("""return table.concat(nil)""") failsWith "" << "bad argument #1 to 'concat' (table expected, got nil)"
program("""return table.concat({}, nil)""") succeedsWith ("")
program("""return table.concat({}, false)""") failsWith "" << "bad argument #2 to 'concat' (string expected, got boolean)"
program("""return table.concat({}, true, true)""") failsWith "" << "bad argument #2 to 'concat' (string expected, got boolean)"
program("""return table.concat({}, true, true, false)""") failsWith "" << "bad argument #2 to 'concat' (string expected, got boolean)"
in(FullContext) {
// strings have the __index metamethod, but concat doesn't care about them
program("""return table.concat("hello", " ")""") failsWith "" << "bad argument #1 to 'concat' (table expected, got string)"
}
program("""return table.concat({}, "", true, false)""") failsWith "" << "bad argument #3 to 'concat' (number expected, got boolean)"
program("""return table.concat({}, "", nil, false)""") failsWith "" << "bad argument #4 to 'concat' (number expected, got boolean)"
program("""return table.concat({}, nil, nil, nil)""") succeedsWith ("")
program("""return table.concat({3, 2, 1})""") succeedsWith ("321")
program("""return table.concat({3, [4]=0, 2, 1})""") succeedsWith ("3210")
program("""return table.concat({3, x=0, 2, 1})""") succeedsWith ("321")
program("""return table.concat({[-1]=1, [-2]=2, [0]=0, [2]=-2, [1.0]=-1}, "", -2)""") succeedsWith ("210-1-2")
program("""return table.concat({[-1]=1, [-2]=2, [0]=0, [2]=-2, [1.0]=-1})""") succeedsWith ("-1-2")
program("""return table.concat({})""") succeedsWith ("")
program("""return table.concat({}, "BOO")""") succeedsWith ("")
program("""return table.concat({}, "", 50)""") succeedsWith ("")
program("""return table.concat({}, "", 0)""") failsWith "" << "invalid value (nil) at index 0 in table for 'concat'"
program("""return table.concat({}, "", -1)""") failsWith "" << "invalid value (nil) at index -1 in table for 'concat'"
program("""return table.concat({}, "", 1, 20)""") failsWith "" << "invalid value (nil) at index 1 in table for 'concat'"
program("""return table.concat({1, 2}, "", 1, 1)""") succeedsWith ("1")
program("""return table.concat({1, 2, 3, 4}, "", -2, 3)""") failsWith "" << "invalid value (nil) at index -2 in table for 'concat'"
program("""return table.concat({"hello", "world", 0})""") succeedsWith ("helloworld0")
program("""return table.concat({"hello", "world", 0}, " ")""") succeedsWith ("hello world 0")
program("""return table.concat({"a", 1, "b", 2, "c", 3}, nil)""") succeedsWith ("a1b2c3")
program("""return table.concat({"a", 1, "b"}, -0.0)""") succeedsWith ("a-0.01-0.0b")
program("""return table.concat({"a", 1, {}})""") failsWith "" << "invalid value (table) at index 3 in table for 'concat'"
program("""return table.concat({{}, {}})""") failsWith "" << "invalid value (table) at index 1 in table for 'concat'"
in(FullContext) {
program("""return table.concat({io.stdout})""") failsWith "" << "invalid value (userdata) at index 1 in table for 'concat'"
}
// concat uses the __index metamethod on the concatenated table
program(
"""local mt = {__index = function(t, k) return k end}
|return table.concat(setmetatable({}, mt), " ", 3, 5)
""") succeedsWith ("3 4 5")
// concat uses the __len metamethod on the concatenated table
program(
"""local mt = {__len = function() return 2 end}
|return table.concat(setmetatable({5, 4, 3, 2, 1, 0}, mt), " ")
""") succeedsWith ("5 4")
program("""local mt = {__len = 10}; return table.concat(setmetatable({}, mt))""") failsWith "" << "attempt to call a number value"
program("""local mt = {__len = function() return "x" end}; return table.concat(setmetatable({}, mt))""") failsWith "" << "object length is not an integer"
program("""local mt = {__len = function() return "2" end}; return table.concat(setmetatable({"a", "b", "c"}, mt))""") succeedsWith "ab"
program(
"""local mt = {__len = function() return "3.0" end}
|return table.concat(setmetatable({"a", "b", "c"}, mt))
""") succeedsWith "abc"
// length is retrieved before the 2nd argument to concat
program("""return table.concat(setmetatable({}, {__len = function() error("BOOM") end}), "", 1, true)""") failsWith "" << "BOOM"
program("""return table.concat(setmetatable({}, {__len = function() error("BOOM") end}), true, true)""") failsWith "" << "BOOM"
program("""return table.concat(setmetatable({}, {__len = function() error("BOOM") end}), false)""") failsWith "" << "BOOM"
// concat uses the __index and __len metamethods on the concatenated table
program(
"""local mt = {__index = function(t, k) return k end; __len = function() return 2 end}
|return table.concat(setmetatable({}, mt), "_", -2)
""") succeedsWith ("-2_-1_0_1_2")
// concat does not use the __tostring metamethod of table elements
program(
"""local mt = {__tostring = function() return "{}" end}
|return table.concat({"a", 1, setmetatable({}, mt)})
""") failsWith "" << "invalid value (table) at index 3 in table for 'concat'"
// concat does not use the __concat metamethod of table elements
program(
"""local mt = {__concat = function() return "{}" end}
|return table.concat({"a", 1, setmetatable({}, mt)})
""") failsWith "" << "invalid value (table) at index 3 in table for 'concat'"
}
about("table.insert") {
program("table.insert()") failsWith "" << "bad argument #1 to 'insert' (table expected, got no value)"
program("table.insert(nil)") failsWith "" << "bad argument #1 to 'insert' (table expected, got nil)"
program("table.insert(123)") failsWith "" << "bad argument #1 to 'insert' (table expected, got number)"
program("table.insert({})") failsWith "" << "wrong number of arguments to 'insert'"
program("table.insert({}, false, false)") failsWith "" << "bad argument #2 to 'insert' (number expected, got boolean)"
program("table.insert({}, 1, 2, 3)") failsWith "" << "wrong number of arguments to 'insert'"
program("table.insert({}, 1, 2, nil)") failsWith "" << "wrong number of arguments to 'insert'"
program("""local t = {}; table.insert(t, "a"); return #t, t[1]""") succeedsWith(1, "a")
program("""local t = {"a"}; table.insert(t, "b"); return #t, t[1], t[2]""") succeedsWith(2, "a", "b")
program("""local t = {"a"}; table.insert(t, 1, "b"); return #t, t[1], t[2]""") succeedsWith(2, "b", "a")
program("""local t = {"a"}; table.insert(t, "1", "b"); return #t, t[1], t[2]""") succeedsWith(2, "b", "a")
program("""local t = {"a"}; table.insert(t, "1.0", "b"); return #t, t[1], t[2]""") succeedsWith(2, "b", "a")
program("""table.insert({}, 0, "x")""") failsWith "" << "bad argument #2 to 'insert' (position out of bounds)"
program("""table.insert({}, 0, nil)""") failsWith "" << "bad argument #2 to 'insert' (position out of bounds)"
program("""table.insert({}, 10, "x")""") failsWith "" << "bad argument #2 to 'insert' (position out of bounds)"
program("""table.insert({}, 10, nil)""") failsWith "" << "bad argument #2 to 'insert' (position out of bounds)"
// __len metamethod
program("""table.insert(setmetatable({}, {__len = function() error("BOOM") end}), 10)""") failsWith "" << "BOOM"
program("""table.insert(setmetatable({}, {__len = function() return 1.1 end}), 10)""") failsWith "" << "object length is not an integer"
// length is queried before processing the rest of the arguments
program("""table.insert(setmetatable({}, {__len = function() error("BOOM") end}), false, false)""") failsWith "" << "BOOM"
program("""table.insert(setmetatable({}, {__len = function() error("BOOM") end}), 1, 2, nil)""") failsWith "" << "BOOM"
program("""local t = setmetatable({}, {__len = function() return -1 end}); table.insert(t, "x"); return #t, t[0]""") succeedsWith(-1, "x")
program("""local t = setmetatable({}, {__len = function() return "-1" end}); table.insert(t, "x"); return #t, t[0]""") succeedsWith("-1", "x")
program("""local t = setmetatable({}, {__len = function() return "-0.0" end}); table.insert(t, "x"); return #t, t[1]""") succeedsWith("-0.0", "x")
program("""table.insert(setmetatable({}, {__len = function() return -1 end}), 0, "x")""") failsWith "" << "bad argument #2 to 'insert' (position out of bounds)"
program("""table.insert(setmetatable({}, {__len = function() return 2 end}), 4, "x")""") failsWith "" << "bad argument #2 to 'insert' (position out of bounds)"
program("""table.insert(setmetatable({}, {__len = function() error("Boom.") end}), 0, "x")""") failsWith "" << "Boom."
// __index and __newindex
program("""local t = setmetatable({"x"}, {__index = error}); table.insert(t, 1, "y"); return #t, t[1], t[2]""") succeedsWith(2, "y", "x")
program("""table.insert(setmetatable({"x"}, {__newindex = function(t, k, v) error(k..v) end}), "y")""") failsWith "" << "2y"
program("""table.insert(setmetatable({"x"}, {__newindex = function(t, k, v) error(k..v) end}), 1, "y")""") failsWith "" << "2x"
program(
"""-- meta #1
|local ks = ""
|local n = 0
|local t = setmetatable({}, {
| __len = function() return n end;
| __newindex = function(t, k, v) n = n + 1; ks = ks.."["..k..tostring(v).."]"; rawset(t, k, v) end;
| __index = function(t, k) ks = ks.."{"..k.."}"; return rawget(t, k) end
|})
|
|table.insert(t, "a")
|table.insert(t, "b")
|table.insert(t, 1, "c")
|t[2] = nil
|table.insert(t, 1, "d")
|return ks, #t, t[1], t[2], t[3], t[4], t[5]
"""
) succeedsWith("[1a][2b][3b][4b]{2}[2c]", 5, "d", "c", null, "b", null)
program(
"""-- meta #2
|local ks = ""
|local n = 0
|local t = setmetatable({}, {
| __len = function() return n end;
| __newindex = function(t, k, v) n = n + 1; ks = ks.."["..k..tostring(v).."]"; rawset(t, k, v) end;
| __index = function(t, k) ks = ks.."{"..k.."}"; return rawget(t, k) end
|})
|
|table.insert(t, "a")
|table.insert(t, "b")
|table.insert(t, 1, "c")
|n = n - 1
|rawset(t, 1, nil)
|table.insert(t, 1, "d")
|return ks, #t, t[1], t[2], t[3], t[4], t[5]
"""
) succeedsWith("[1a][2b][3b]{1}[1d]", 3, "d", null, "a", null, null)
program(
"""-- meta #3
|local ks = ""
|local n = 0
|local t = setmetatable({}, {
| __len = function() return n end;
| __newindex = function(t, k, v) n = n + 1; ks = ks.."["..k..tostring(v).."]"; rawset(t, k, v) end;
| __index = function(t, k) ks = ks.."{"..k.."}"; return rawget(t, k) end
|})
|
|table.insert(t, "a")
|table.insert(t, "b")
|table.insert(t, 1, "c")
|n = n - 1
|rawset(t, 2, nil)
|table.insert(t, 1, "d")
|return ks, #t, t[1], t[2], t[3], t[4], t[5]
"""
) succeedsWith("[1a][2b][3b]{2}[2c]", 3, "d", "c", null, null, null)
program(
"""-- meta #4
|local ks = ""
|local n = 0
|local t = setmetatable({}, {
| __len = function() return n end;
| __newindex = function(t, k, v) n = n + 1; ks = ks.."["..k..tostring(v).."]"; rawset(t, k, v) end;
| __index = function(t, k) ks = ks.."{"..k.."}"; return rawget(t, k) end
|})
|
|table.insert(t, "a")
|table.insert(t, "b")
|table.insert(t, 1, "c")
|n = n - 1
|rawset(t, 3, nil)
|table.insert(t, 1, "d")
|return ks, #t, t[1], t[2], t[3], t[4], t[5]
"""
) succeedsWith("[1a][2b][3b][3a]", 3, "d", "c", "a", null, null)
program(
"""-- meta #5
|local ks = ""
|local n = 0
|local t = setmetatable({}, {
| __len = function() return n end;
| __newindex = function(t, k, v) n = n + 1; ks = ks.."["..k..tostring(v).."]"; rawset(t, k, v) end;
| __index = function(t, k) ks = ks.."{"..k.."}"; return rawget(t, k) end
|})
|
|table.insert(t, "a")
|table.insert(t, "b")
|table.insert(t, 1, "c")
|n = n - 1
|rawset(t, 4, nil)
|table.insert(t, 1, "d")
|return ks, #t, t[1], t[2], t[3], t[4], t[5]
"""
) succeedsWith("[1a][2b][3b]", 2, "d", "c", "a", null, null)
}
about("table.move") {
program("""table.move()""") failsWith "" << "bad argument #2 to 'move' (number expected, got no value)"
program("""table.move(nil)""") failsWith "" << "bad argument #2 to 'move' (number expected, got no value)"
program("""table.move(nil, nil)""") failsWith "" << "bad argument #2 to 'move' (number expected, got nil)"
program("""table.move(nil, "x")""") failsWith "" << "bad argument #2 to 'move' (number expected, got string)"
program("""table.move(nil, 1)""") failsWith "" << "bad argument #3 to 'move' (number expected, got no value)"
program("""table.move(nil, 1, 1)""") failsWith "" << "bad argument #4 to 'move' (number expected, got no value)"
program("""table.move(nil, 1, 1, 1)""") failsWith "" << "bad argument #1 to 'move' (table expected, got nil)"
program("""table.move({}, 1, 1, 1, "x")""") failsWith "" << "bad argument #5 to 'move' (table expected, got string)"
program("""table.move("x", 1, 1, 1, "y")""") failsWith "" << "bad argument #5 to 'move' (table expected, got string)"
program("""table.move({}, 1 << 63, (1 << 63) - 1, 0)""") failsWith "" << "bad argument #3 to 'move' (too many elements to move)"
program("""table.move({}, 0, (1 << 63) - 1, 0)""") failsWith "" << "bad argument #3 to 'move' (too many elements to move)"
program("""table.move({}, 1 << 63, -1, 0)""") failsWith "" << "bad argument #3 to 'move' (too many elements to move)"
program("""return table.move({}, 1, 1, 1, nil)""") succeedsWith (classOf[Table])
program("""local t = {}; local u = table.move(t, 1, 1, 1); return t == u""") succeedsWith (true)
program("""local a = {}; return table.move({10,20,30}, 1, 0, 3, a) == a""") succeedsWith (true)
// destination wrap around
program("""table.move({}, 1, (1 << 63) - 1, 2)""") failsWith ("" << "bad argument #4 to 'move' (destination wrap around)")
program("""table.move({}, 1, 2, (1 << 63) - 1)""") failsWith ("" << "bad argument #4 to 'move' (destination wrap around)")
program("""table.move({}, (1 << 63), -2, 2)""") failsWith ("" << "bad argument #4 to 'move' (destination wrap around)")
program(
"""local t = table.move({"a", "b", "c", "d"}, 1, 3, 4)
|return #t, t[1], t[2], t[3], t[4], t[5], t[6]
"""
) succeedsWith(6, "a", "b", "c", "a", "b", "c")
program(
"""local t = table.move({"a", "b", "c", "d"}, 1, 4, 2)
|return #t, t[1], t[2], t[3], t[4], t[5]
"""
) succeedsWith(5, "a", "a", "b", "c", "d")
program(
"""local t = table.move({"a", "b", "c", "d", "e"}, 2, 5, 0)
|return #t, t[0], t[1], t[2], t[3], t[4], t[5]
"""
) succeedsWith(5, "b", "c", "d", "e", "d", "e")
program(
"""-- same dest, no range overlap
|local log = ""
|local mt = {
| __index = function (t, k) log = log.."["..tostring(k).."]" end,
| __newindex = function (t, k, v) log = log.."{"..tostring(k).."}" end
|}
|
|table.move(setmetatable({}, mt), 1, 3, 4)
|return log
"""
) succeedsWith ("[1]{4}[2]{5}[3]{6}")
program(
"""-- same dest, range overlap
|local log = ""
|local mt = {
| __index = function (t, k) log = log.."["..tostring(k).."]" end,
| __newindex = function (t, k, v) log = log.."{"..tostring(k).."}" end
|}
|
|table.move(setmetatable({}, mt), 1, 3, 2)
|return log
"""
) succeedsWith ("[3]{4}[2]{3}[1]{2}")
program(
"""-- same dest, full overlap
|local log = ""
|local mt = {
| __index = function (t, k) log = log.."["..tostring(k).."]" end,
| __newindex = function (t, k, v) log = log.."{"..tostring(k).."}" end
|}
|
|table.move(setmetatable({}, mt), 1, 3, 1)
|return log
"""
) succeedsWith ("[1]{1}[2]{2}[3]{3}")
program(
"""-- other dest, range overlap
|local log = ""
|local mt = {
| __index = function (t, k) log = log.."["..tostring(k).."]" end,
| __newindex = function (t, k, v) log = log.."{"..tostring(k).."}" end
|}
|
|table.move(setmetatable({}, mt), 1, 3, 2, setmetatable({}, mt))
|return log
"""
) succeedsWith ("[1]{2}[2]{3}[3]{4}")
program(
"""-- other dest, no range overlap
|local log = ""
|local mt = {
| __index = function (t, k) log = log.."["..tostring(k).."]" end,
| __newindex = function (t, k, v) log = log.."{"..tostring(k).."}" end
|}
|
|table.move(setmetatable({}, mt), 1, 3, 4)
|return log
"""
) succeedsWith ("[1]{4}[2]{5}[3]{6}")
}
about("table.pack") {
program("""return table.pack(xx, yy, zz).n""") succeedsWith (3)
program("""return #table.pack(3, 2, 1, 0, -1)""") succeedsWith (5)
}
about("table.remove") {
program("""table.remove()""") failsWith "" << "bad argument #1 to 'remove' (table expected, got no value)"
program("""table.remove("")""") failsWith "" << "bad argument #1 to 'remove' (table expected, got string)"
program("""return table.remove({})""") succeedsWith (null)
program("""return table.remove({}, 1)""") succeedsWith (null)
program("""return table.remove({}, 0)""") succeedsWith (null)
program("""return table.remove({}, nil)""") succeedsWith (null)
program("""return table.remove({}, nil, "extra", "args", "ignored")""") succeedsWith (null)
program("""return table.remove({}, "1")""") succeedsWith (null)
program("""return table.remove({}, "1.0")""") succeedsWith (null)
program("""return table.remove({}, -0.0)""") succeedsWith (null)
program("""return table.remove({}, "-0.0")""") succeedsWith (null)
program("""table.remove({}, false)""") failsWith "" << "bad argument #2 to 'remove' (number expected, got boolean)"
program("""table.remove({}, "x")""") failsWith "" << "bad argument #2 to 'remove' (number expected, got string)"
program("""table.remove({}, 2)""") failsWith "" << "bad argument #2 to 'remove' (position out of bounds)"
program("""table.remove({}, -1)""") failsWith "" << "bad argument #2 to 'remove' (position out of bounds)"
program("""local t = {42}; local x = table.remove(t); return x, #t, t[1]""") succeedsWith(42, 0, null)
program("""return table.remove({42}, 0)""") failsWith "" << "bad argument #2 to 'remove' (position out of bounds)"
program("""local t = {42}; local x = table.remove(t, 1); return x, #t, t[1]""") succeedsWith(42, 0, null)
program("""local t = {42}; local x = table.remove(t, 2); return x, #t, t[1]""") succeedsWith(null, 1, 42)
program("""local t = {"a", "b", "c", "d"}; local x = table.remove(t); return x, #t, t[1], t[2], t[3], t[4]""") succeedsWith("d", 3, "a", "b", "c", null)
program("""local t = {"a", "b", "c", "d"}; local x = table.remove(t, 1); return x, t[1], t[2], t[3], t[4]""") succeedsWith("a", "b", "c", "d", null)
program("""local t = {"a", "b", "c", "d"}; local x = table.remove(t, 2); return x, t[1], t[2], t[3], t[4]""") succeedsWith("b", "a", "c", "d", null)
program("""local t = {"a", "b", "c", "d"}; local x = table.remove(t, 3); return x, t[1], t[2], t[3], t[4]""") succeedsWith("c", "a", "b", "d", null)
program("""local t = {"a", "b", "c", "d"}; local x = table.remove(t, 4); return x, t[1], t[2], t[3], t[4]""") succeedsWith("d", "a", "b", "c", null)
// the __len metamethod
// __len is always consulted when defined
program("""local t = setmetatable({}, {__len=function() error("Boom") end}); return table.remove(t)""") failsWith "" << "Boom"
program("""local t = setmetatable({"a"}, {__len=function() error("Boom") end}); return table.remove(t, 0)""") failsWith "" << "Boom"
program("""local t = setmetatable({"a"}, {__len=function() error("Boom") end}); return table.remove(t, 1)""") failsWith "" << "Boom"
program("""local t = setmetatable({"a"}, {__len=function() error("Boom") end}); return table.remove(t, 2)""") failsWith "" << "Boom"
// no shift variants (just erase the element at pos)
program("""local t = setmetatable({[0]="x", "a", "b"}, {__len=function() return 0 end}); return table.remove(t, 0), #t, t[0], t[1], t[2]""") succeedsWith("x", 0, null, "a", "b")
program("""local t = setmetatable({"a", "b", "c", "d"}, {__len=function() return 1 end}); return table.remove(t, 2), #t, t[1], t[2], t[3], t[4]""") succeedsWith("b", 1, "a", null, "c", "d")
// length is queried before the position is processed
program("""local t = setmetatable({"a"}, {__len=function() error("Boom") end}); table.remove(t, false)""") failsWith "" << "Boom"
program("""local t = setmetatable({}, {__len=function() return -1 end}); return table.remove(t)""") succeedsWith (null)
program("""local t = setmetatable({}, {__len=function() return -1 end}); return table.remove(t, -2)""") failsWith "" << "bad argument #2 to 'remove' (position out of bounds)"
program("""local t = setmetatable({}, {__len=function() return -1 end}); return table.remove(t, -1)""") succeedsWith (null)
program("""local t = setmetatable({}, {__len=function() return -1 end}); return table.remove(t, 0)""") failsWith "" << "bad argument #2 to 'remove' (position out of bounds)"
program("""local t = setmetatable({}, {__len=function() return -1 end}); return table.remove(t, 1)""") failsWith "" << "bad argument #2 to 'remove' (position out of bounds)"
// __index and __newindex
// no shift variants (just erase the element at pos); using the __index metamethod
program("""local t = setmetatable({[0]="x", "a", "b"}, {__len=function() return 0 end; __index=rawget}); return table.remove(t, 0), #t, t[0], t[1], t[2]""") succeedsWith("x", 0, null, "a", "b")
program("""local t = setmetatable({"a", "b", "c", "d"}, {__len=function() return 1 end; __index=rawget}); return table.remove(t, 2), #t, t[1], t[2], t[3], t[4]""") succeedsWith("b", 1, "a", null, "c", "d")
program("""local t = setmetatable({"x"}, {__index = error}); return table.remove(t)""") succeedsWith ("x")
program("""local t = setmetatable({nil, "x"}, {__len = function() return 2 end; __index = function(t,k) error(tostring(k)) end}); return table.remove(t, 1)""") failsWith ("" << "1")
program("""local t = setmetatable({"x", nil}, {__index = function(t,k) error(tostring(k)) end}); return table.remove(t, 2)""") failsWith ("" << "2")
program("""local t = setmetatable({nil, "x"}, {__len = function() return 2 end; __newindex = function(t,k,v) error(tostring(k)..tostring(v)) end}); return table.remove(t, 2)""") succeedsWith ("x")
program("""local t = setmetatable({nil, "x"}, {__len = function() return 2 end; __newindex = function(t,k,v) error(tostring(k)..tostring(v)) end}); return table.remove(t, 1)""") failsWith ("" << "1x")
program(
"""-- meta #1
|local ks = ""
|local n = 0
|local t = setmetatable({}, {
| __len = function() return n end;
| __newindex = function(t, k, v) n = n + 1; ks = ks.."["..k..tostring(v).."]"; rawset(t, k, v) end;
| __index = function(t, k) ks = ks.."{"..k.."}"; return rawget(t, k) end
|})
|
|t[1] = "a"
|t[3] = "c"
|t[2] = "b"
|t[4] = "d"
|n = n - 1
|rawset(t, 3, nil)
|local x = table.remove(t, 2)
|return ks, #t, x, t[1], t[2], t[3], t[4]
"""
) succeedsWith("[1a][3c][2b][4d]{3}[3nil]", 4, "b", "a", null, null, "d")
program(
"""-- meta #2
|local ks = ""
|local n = 0
|local t = setmetatable({}, {
| __len = function() return n end;
| __newindex = function(t, k, v) n = n + 1; ks = ks.."["..k..tostring(v).."]"; rawset(t, k, v) end;
| __index = function(t, k) ks = ks.."{"..k.."}"; return rawget(t, k) end
|})
|
|t[1] = "a"
|t[3] = "c"
|t[2] = "b"
|t[4] = "d"
|n = n - 1
|rawset(t, 3, nil)
|local x = table.remove(t, 2)
|local y = table.remove(t)
|return ks, #t, x, y, t[1], t[2], t[3], t[4]
"""
) succeedsWith("[1a][3c][2b][4d]{3}[3nil]", 4, "b", "d", "a", null, null, null)
}
about("table.sort") {
program("""table.sort()""") failsWith "" << "bad argument #1 to 'sort' (table expected, got no value)"
program("""table.sort(nil)""") failsWith "" << "bad argument #1 to 'sort' (table expected, got nil)"
program("""table.sort("hello")""") failsWith "" << "bad argument #1 to 'sort' (table expected, got string)"
// comparator defined, but not called
program("""table.sort({}, nil)""") succeedsWith()
program("""table.sort({}, 42)""") succeedsWith()
program("""local t = {2, 1}; table.sort(t); return t[1], t[2]""") succeedsWith(1, 2)
program("""local t = {2, 1}; table.sort(t, nil); return t[1], t[2]""") succeedsWith(1, 2)
program("""table.sort({3, 2}, 1)""") failsWith "" << "bad argument #2 to 'sort' (function expected, got number)"
program("""table.sort({true, false})""") failsWith "" << "attempt to compare two boolean values"
program("""table.sort({1, false})""") failsWith "" << "attempt to compare " << "boolean with number"
// "array too big" errors: length must fit into a signed 32-bit integer
program("""local a = setmetatable({}, {__len = function () return (1 << 63) - 1 end}); table.sort(a)""") failsWith "" << "bad argument #1 to 'sort' (array too big)"
program("""local a = setmetatable({}, {__len = function () return (1 << 31) - 1 end}); table.sort(a)""") failsWith "" << "bad argument #1 to 'sort' (array too big)"
// ok when length < Integer.MAX_VALUE
program("""local a = setmetatable({}, {__len = function () return (1 << 31) - 2 end}); table.sort(a, function() error("BOOM!") end)""") failsWith "" << "BOOM!"
def doSortExplicit(vals: Seq[Any], exp: Seq[Any], comp: Option[String]): Unit = {
val vs = vals map {
case s: String => LuaFormat.escape(s)
case other => other
}
val ctor = vs.mkString("{", ",", "}")
val getter = ((1 to vals.size) map {
"t[" + _.toString + "]"
}).mkString(", ")
val compStr = comp match {
case Some(s) => ", " + s
case None => ""
}
program("local t = " + ctor + "; table.sort(t" + compStr + "); return " + getter).succeedsWith(exp: _*)
}
def doSort(vals: Any*): Unit = {
val exp = vals sortWith {
case (a: String, b: String) => a.compareTo(b) < 0
case (a: Number, b: Number) => org.classdump.luna.Ordering.NUMERIC.lt(a, b)
case _ => false
}
doSortExplicit(vals, exp, None)
}
doSort("a", "b", "c", "d")
doSort("d", "c", "b", "a")
doSort((50 to 120): _*)
doSort(30.to(4, -2): _*)
doSort(1.1, 1.3, 1.0, 1.2, 2.0, 1.0)
// doSortExplicit(Seq("hello", "hi", "hola"), Seq("hi", "hola", "hello"), Some("function(a, b) return #a < #b end"))
doSort(3, 8, 5, 4, 6)
program(
"""local t = {"one", "thirteen", "three", "four", "eleven"}
|table.sort(t, function(a, b) return #a < #b end)
|return t[1], t[2], t[3], t[4], t[5]
""") succeedsWith("one", "four", "three", "eleven", "thirteen")
// full stops
program(
"""-- total mt access
|local t = {"one", "thirteen", "three", "four", "eleven"}
|local mt = {
| __len = function (t) return rawlen(t) end,
| __index = function (t, k) return rawget(t, k) end,
| __newindex = function (t, k, v) return rawset(t, k, v) end
|}
|table.sort(setmetatable(t, mt), function(a, b) return #a < #b end)
|return t[1], t[2], t[3], t[4], t[5]
""") succeedsWith("one", "four", "three", "eleven", "thirteen")
// proxy
program(
"""-- proxy
|local function proxy(t)
| return setmetatable({}, {
| __len = function (_t) return #t end,
| __index = function (_t, k) return t[k] end,
| __newindex = function (_t, k, v) t[k] = v end
| })
|end
|
|local t = {5, 3, 2, 6, 1, 4}
|table.sort(proxy(t))
|return t[1], t[2], t[3], t[4], t[5], t[6]
"""
) succeedsWith(1, 2, 3, 4, 5, 6)
// comparator must be a function
program("""local f = setmetatable({}, {__call=function() return true end}); table.sort({2, 1}, f)""") failsWith "" << "bad argument #2 to 'sort' (function expected, got table)"
// invalid order function
program("""table.sort({1, 2, 3, 4}, function(a,b) return true end)""") failsWith "" << "invalid order function for sorting"
// PUC-Lua does not detect the invalid order function in this case, and neither do we
program("""table.sort({1, 2, 3, 4}, function(a,b) return false end)""") succeedsWith()
}
about("table.unpack") {
program("""return table.unpack()""") failsWith "" << "attempt to get length of a nil value"
program("""return table.unpack(1)""") failsWith "" << "attempt to get length of a number value"
program("""return table.unpack(1,2)""") failsWith "" << "attempt to get length of a number value"
program("""return table.unpack(1,2,3)""") failsWith "" << "attempt to index a number value"
program("""return table.unpack(1,2.3)""") failsWith "" << "bad argument #2 to 'unpack' (number has no integer representation)"
program("""return table.unpack(1,"x")""") failsWith "" << "bad argument #2 to 'unpack' (number expected, got string)"
program("""return table.unpack(1,2,"y")""") failsWith "" << "bad argument #3 to 'unpack' (number expected, got string)"
program("""local x; return table.unpack(1,x)""") failsWith "" << "attempt to get length of a number value"
program("""return table.unpack({1,2,3}, -2, 2)""") succeedsWith(null, null, null, 1, 2)
program("""return table.unpack({3,2,1}, 3, 3)""") succeedsWith (1)
program("""return table.unpack({3,2,1}, 0, 1)""") succeedsWith(null, 3)
program("""return table.unpack({3,2,1}, 0, -1)""") succeedsWith()
program("""return table.unpack({3,2,1}, 10, 12)""") succeedsWith(null, null, null)
program("""return table.unpack({3,2,1,0,-1}, 2)""") succeedsWith(2, 1, 0, -1)
program("""return table.unpack({1,0,-1})""") succeedsWith(1, 0, -1)
program("""return table.unpack("nono")""") failsWith "" << "attempt to index a string value"
program("""local maxi = (1 << 31) - 1; table.unpack({}, 0, maxi)""") failsWith "" << "too many results to unpack"
program("""local maxi = (1 << 31) - 1; table.unpack({}, 1, maxi)""") failsWith "" << "too many results to unpack"
program("""local maxI = (1 << 63) - 1; table.unpack({}, 0, maxI)""") failsWith "" << "too many results to unpack"
program("""local maxI = (1 << 63) - 1; table.unpack({}, 1, maxI)""") failsWith "" << "too many results to unpack"
program("""local mini, maxi = -(1 << 31), (1 << 31) - 1; table.unpack({}, mini, maxi)""") failsWith "" << "too many results to unpack"
program("""local minI, maxI = 1 << 63, (1 << 63) - 1; table.unpack({}, minI, maxI)""") failsWith "" << "too many results to unpack"
// behaviour near math.maxinteger
program(
"""local maxI = (1 << 63) - 1
|local t = {[maxI - 1] = 12, [maxI] = 23}
|return table.unpack(t, maxI - 1, maxI)
""") succeedsWith(12, 23)
program(
"""local maxI = (1 << 63) - 1
|local t = setmetatable({}, {__index = function(t,k) return k end})
|return table.unpack(t, maxI - 1, maxI)
""") succeedsWith(Long.MaxValue - 1, Long.MaxValue)
in(FullContext) {
program("""return table.unpack("hello")""") succeedsWith(null, null, null, null, null)
program("""return table.unpack("1","2","3")""") succeedsWith(null, null)
}
about("__len metamethod") {
program("""return table.unpack(setmetatable({}, { __len = 3 }))""") failsWith "" << "attempt to call a number value"
program("""return table.unpack(setmetatable({}, { __len = function() error("boom") end }), -1, 1)""") succeedsWith(null, null, null)
program("""return table.unpack(setmetatable({}, { __len = function() error("boom") end }), -1)""") failsWithLuaError "boom"
program("""return table.unpack(setmetatable({}, { __len = function() return 2 end }), -1)""") succeedsWith(null, null, null, null)
program("""return table.unpack(setmetatable({}, { __len = function() return "2" end }), -1)""") succeedsWith(null, null, null, null)
program("""return table.unpack(setmetatable({}, { __len = function() return "2.0" end }), -1)""") succeedsWith(null, null, null, null)
program("""return table.unpack(setmetatable({}, { __len = function() return "boo" end }), 1)""") failsWith "" << "object length is not an integer"
}
about("__index metamethod") {
program("""return table.unpack(setmetatable({}, { __index = 3 }), 1, 2)""") failsWith "" << "attempt to index a number value"
program(
"""local mt = { __index = function(t, k) local x = k or 0; return x * x end }
|return table.unpack(setmetatable({}, mt), -1, 3)
""") succeedsWith(1, 0, 1, 4, 9)
}
about("__len and __index metamethods") {
program(
"""local mt = {
| __index = function(t, k) local x = k or 0; return x * x end,
| __len = function() return 5 end
|}
|return table.unpack(setmetatable({}, mt))
""") succeedsWith(1, 4, 9, 16, 25)
}
}
}
}
| kroepke/luna | luna-tests/src/test/scala/org/classdump/luna/test/fragments/TableLibFragments.scala | Scala | apache-2.0 | 36,350 |
package com.mindcandy.waterfall.io
import java.util.concurrent.TimeUnit
import akka.actor.ActorSystem
import akka.util.Timeout
import com.mindcandy.waterfall.RowSeparator._
import com.mindcandy.waterfall.intermediate.FileIntermediate
import com.mindcandy.waterfall.{ IOConfig, IOOps, IOSource, Intermediate, IntermediateFormat }
import com.typesafe.scalalogging.slf4j.Logging
import spray.client.pipelining._
import spray.http._
import scala.concurrent.duration.Duration
import scala.concurrent.{ Await, ExecutionContext, Future }
import scala.util.Try
case class HttpIOConfig(url: String, timeout: Int = 5000) extends IOConfig
case class HttpIOSource[A <: AnyRef](config: HttpIOConfig, override val columnSeparator: Option[String] = None, val rowSeparator: RowSeparator = NewLine)
extends IOSource[A]
with IOOps[A] {
def retrieveInto[I <: Intermediate[A]](intermediate: I)(implicit format: IntermediateFormat[A]) = {
val inputContent = rowSeparator match {
case NewLine => fileContent.map {
_.lines.map(fromLine(_))
}
case NoSeparator => {
fileContent.map {
_.lines.mkString("") match {
case combinedData if !combinedData.isEmpty => Iterator(fromLine(combinedData))
case _ => Iterator[A]()
}
}
}
}
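    // note: the actor system is torn down eagerly below, so each HttpIOSource
    // instance supports only a single retrieveInto call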
system.shutdown()
system.awaitTermination()
inputContent.flatMap { content =>
intermediate.write(content).map { _ =>
logger.info("Retrieving into %s from %s completed".format(intermediate, config))
}
}
}
implicit val system = ActorSystem("waterfall-httpio")
implicit val executionContext: ExecutionContext = system.dispatcher
implicit val timeout = Timeout(config.timeout, TimeUnit.MILLISECONDS)
val pipeline: HttpRequest => Future[String] = sendReceive ~> unmarshal[String]
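  // Blocks the calling thread until the response arrives; the Await itself is
  // unbounded (Duration.Inf), so the implicit spray Timeout above is what
  // effectively limits the request.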
private[this] def fileContent: Try[String] = Try {
Await.result(pipeline(Get(config.url)), Duration.Inf)
}
}
trait MultipleHttpIOConfig extends IOConfig {
def urls: List[String]
def combinedFileUrl: String
def timeout: Int = 5000
override def url = urls.mkString(";")
override def toString = "MultipleHttpIOConfig(%s)".format(urls)
}
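
// Illustrative sketch (not in the original file): a concrete config that merges two
// feeds into one combined intermediate; the URLs and file path would be supplied by
// the caller.
case class ExampleFeedsConfig(urls: List[String], combinedFileUrl: String) extends MultipleHttpIOConfig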
case class MultipleHttpIOSource[A <: AnyRef](config: MultipleHttpIOConfig) extends IOSource[A] with Logging {
def retrieveInto[I <: Intermediate[A]](intermediate: I)(implicit format: IntermediateFormat[A]) = {
val combinedIntermediate = FileIntermediate[A](config.combinedFileUrl)
val result = generateHttpIOConfigs(config).foldLeft(Try(())) { (previousResult, httpIOConfig) =>
previousResult.flatMap { _ =>
HttpIOSource[A](httpIOConfig).retrieveInto(combinedIntermediate)(format)
}
}
result.flatMap { _ =>
combinedIntermediate.read(intermediate.write)(format).map { _ =>
logger.info("Retrieving into %s from %s completed".format(intermediate, config))
}
}
}
def generateHttpIOConfigs(config: MultipleHttpIOConfig) = {
config.urls.map { url => HttpIOConfig(url, config.timeout) }
}
}
| mindcandy/waterfall | src/main/scala/com/mindcandy/waterfall/io/HttpIO.scala | Scala | mit | 3,049 |
/*
* Copyright 2013-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package laika.io
import java.io.File
import scala.io.Codec
import laika.tree.Paths.Path
import laika.tree.Paths.Root
import laika.template.ParseTemplate
import laika.template.DefaultTemplate
import laika.directive.Directives.Templates
import laika.parse.css.ParseStyleSheet
/** Represents a tree structure of Inputs, abstracting over various types of IO resources.
*
* While the default implementations wrap the structure of directories in the file system,
* other implementations may build an entirely virtual input tree structure.
*
* @author Jens Halm
*/
trait InputProvider {
/** The local name of the tree represented by this provider.
*/
lazy val name: String = path.name
/** The full path of the tree represented by this provider.
* This path is always an absolute path
* from the root of the (virtual) input tree,
   * and therefore does not represent the filesystem
* path in case of file I/O.
*/
def path: Path
/** All inputs for configuration files
* on this level of the input hierarchy.
*/
def configDocuments: Seq[Input]
/** All inputs for markup documents
* that need to be parsed
* on this level of the input hierarchy.
*/
def markupDocuments: Seq[Input]
/** All inputs for dynamic files
* that need to be processed
* on this level of the input hierarchy.
*/
def dynamicDocuments: Seq[Input]
/** All inputs for style sheets
* that need to be processed
* on this level of the input hierarchy,
* mapped to their format.
*/
def styleSheets: Map[String,Seq[Input]]
/** All inputs for static files
* that need to be copied
* from this level of the input hierarchy.
*/
def staticDocuments: Seq[Input]
/** All inputs for template files
* on this level of the input hierarchy.
*/
def templates: Seq[Input]
/** All subtrees of this provider.
*/
def subtrees: Seq[InputProvider]
/** The paths this provider has been created from
* or an empty list if this input provider does
* not originate from the file system.
*/
def sourcePaths: Seq[String]
}
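
/** A hedged sketch (added for illustration, not part of the original source) of the
  * "entirely virtual" provider the scaladoc above mentions: every level is empty,
  * which makes it a convenient starting point for in-memory input trees.
  */
object EmptyInputProvider extends InputProvider {
  val path: Path = Root
  val configDocuments: Seq[Input] = Nil
  val markupDocuments: Seq[Input] = Nil
  val dynamicDocuments: Seq[Input] = Nil
  val styleSheets: Map[String, Seq[Input]] = Map.empty
  val staticDocuments: Seq[Input] = Nil
  val templates: Seq[Input] = Nil
  val subtrees: Seq[InputProvider] = Nil
  val sourcePaths: Seq[String] = Nil
}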
/** Factory methods for creating `InputProvider` and `InputConfigBuilder`
* instances. The latter offers fine grained control like setting
* custom document type matchers or custom template engines, before
* creating the actual `InputProvider`.
*/
object InputProvider {
type FileFilter = File => Boolean
private class DirectoryInputProvider (dirs: Seq[File], val path: Path, exclude: FileFilter, docTypeMatcher: Path => DocumentType, codec: Codec) extends InputProvider {
import DocumentType._
private def docType (f: File) = docTypeMatcher(path / f.getName)
private def toInput (pair: (DocumentType,File)) = Input.fromFile(pair._2, path)(codec)
private lazy val files = {
def filesInDir (dir: File) = dir.listFiles filter (f => f.isFile && !exclude(f))
dirs flatMap filesInDir map (f => (docType(f), f)) groupBy (_._1) withDefaultValue Nil
}
private def documents (docType: DocumentType) = files(docType).map(toInput)
lazy val configDocuments: Seq[Input] = documents(Config)
lazy val markupDocuments: Seq[Input] = documents(Markup)
lazy val dynamicDocuments: Seq[Input] = documents(Dynamic)
lazy val styleSheets: Map[String, Seq[Input]] = files collect { case p@(StyleSheet(format), pairs) => (format, pairs map toInput) }
lazy val staticDocuments: Seq[Input] = documents(Static)
lazy val templates: Seq[Input] = documents(Template)
lazy val sourcePaths: Seq[String] = dirs map (_.getAbsolutePath)
lazy val subtrees: Seq[InputProvider] = {
def subDirs (dir: File) = dir.listFiles filter (f => f.isDirectory && !exclude(f) && docType(f) != Ignored)
val byName = (dirs flatMap subDirs groupBy (_.getName)).values
byName map (subs => new DirectoryInputProvider(subs, path / subs.head.getName, exclude, docTypeMatcher, codec)) toList
}
}
/** Creates an InputProvider based on the specified directory, including
* all subdirectories.
*
* @param root the root directory of the input tree
* @param exclude the files to exclude from processing
* @param docTypeMatcher a function determining the document type based on the path of the input
* @param codec the character encoding of the files, if not specified the platform default will be used
*/
def forRootDirectory (root: File, exclude: FileFilter, docTypeMatcher: Path => DocumentType)(implicit codec: Codec): InputProvider =
forRootDirectories(Seq(root), exclude, docTypeMatcher)(codec)
/** Creates an InputProvider based on the specified directories, including
* all subdirectories. The directories will be merged into a tree with a single
* root. If any of the specified root directories contain sub-directories with
* the same name, these sub-directories will be merged, too.
*
* @param roots the root directories of the input tree
* @param exclude the files to exclude from processing
* @param docTypeMatcher a function determining the document type based on the path of the input
* @param codec the character encoding of the files, if not specified the platform default will be used
*/
def forRootDirectories (roots: Seq[File], exclude: FileFilter, docTypeMatcher: Path => DocumentType)(implicit codec: Codec): InputProvider = {
require(roots.nonEmpty, "The specified roots sequence must contain at least one directory")
for (root <- roots) {
require(root.exists, s"Directory ${root.getAbsolutePath} does not exist")
require(root.isDirectory, s"File ${root.getAbsolutePath} is not a directory")
}
new DirectoryInputProvider(roots, Root, exclude, docTypeMatcher, codec)
}
/** The configuration for an input tree, consisting of the actual provider for
   * all inputs, separate inputs for (optional) root configuration sources,
   * the template and style sheet engines to use, and a flag indicating whether
   * parsing should be performed in parallel.
*/
case class InputConfig (provider: InputProvider, config: Seq[Input], templateParser: ParseTemplate, styleSheetParser: ParseStyleSheet, parallel: Boolean)
/** Responsible for building new InputProviders based
* on the specified document type matcher and codec.
*/
trait ProviderBuilder {
def build (docTypeMatcher: Path => DocumentType, codec: Codec): InputProvider
}
private[InputProvider] class DirectoryProviderBuilder (roots: Seq[File], exclude: FileFilter) extends ProviderBuilder {
def build (docTypeMatcher: Path => DocumentType, codec: Codec): InputProvider =
InputProvider.forRootDirectories(roots, exclude, docTypeMatcher)(codec)
}
/** A filter that selects files that are hidden according to `java.io.File.isHidden`.
*/
val hiddenFileFilter: FileFilter = file => file.isHidden && file.getName != "."
/** API for configuring an input tree.
* Gives access to all relevant aspects of traversing, parsing and processing
* a tree of inputs.
*/
class InputConfigBuilder (
provider: ProviderBuilder,
codec: Codec,
docTypeMatcher: Option[Path => DocumentType] = None,
templateParser: Option[ParseTemplate] = None,
styleSheetParser: Option[ParseStyleSheet] = None,
config: List[Input] = Nil,
isParallel: Boolean = false) {
/** Specifies the style sheet engine to use for
* parsing all CSS inputs found in the tree.
*/
def withStyleSheetParser (parser: ParseStyleSheet): InputConfigBuilder =
new InputConfigBuilder(provider, codec, docTypeMatcher, templateParser, Some(parser), config, isParallel)
/** Specifies the template engine to use for
* parsing all template inputs found in the tree.
*/
def withTemplateParser (parser: ParseTemplate): InputConfigBuilder =
new InputConfigBuilder(provider, codec, docTypeMatcher, Some(parser), styleSheetParser, config, isParallel)
/** Specifies custom template directives to use with
* the default template engine.
*/
def withTemplateDirectives (directives: Templates.Directive*): InputConfigBuilder =
withTemplateParser(ParseTemplate as DefaultTemplate.withDirectives(directives:_*))
/** Specifies the function to use for determining the document type
* of the input based on its path.
*/
def withDocTypeMatcher (matcher: Path => DocumentType): InputConfigBuilder =
new InputConfigBuilder(provider, codec, Some(matcher), templateParser, styleSheetParser, config, isParallel)
/** Specifies a root configuration file that gets
* inherited by this tree and its subtrees.
* The syntax of the input is expected to be of a format
* compatible with the Typesafe Config library.
*/
def withConfigFile (file: File): InputConfigBuilder = withConfigInput(Input.fromFile(file)(codec))
/** Specifies the name of a root configuration file that gets
* inherited by this tree and its subtrees.
* The syntax of the input is expected to be of a format
* compatible with the Typesafe Config library.
*/
def withConfigFile (name: String): InputConfigBuilder = withConfigInput(Input.fromFile(name)(codec))
/** Specifies a root configuration source that gets
* inherited by this tree and its subtrees.
* The syntax of the input is expected to be of a format
* compatible with the Typesafe Config library.
*/
def withConfigString (source: String): InputConfigBuilder = withConfigInput(Input.fromString(source))
private def withConfigInput (input: Input): InputConfigBuilder =
new InputConfigBuilder(provider, codec, docTypeMatcher, templateParser, styleSheetParser, input :: config, isParallel)
/** Instructs the parser to process all inputs in parallel.
* The recursive structure of inputs will be flattened before parsing
   * and then reassembled afterwards; the parallel processing therefore
* includes all subtrees of this input tree.
*/
def inParallel: InputConfigBuilder = new InputConfigBuilder(provider, codec, docTypeMatcher, templateParser, styleSheetParser, config, true) // TODO - custom TaskSupport
/** Builds the final configuration for this input tree
* for the specified parser factory.
*
* @param markupSuffixes all suffixes recognized by the parsers configured to consume this input
*/
def build (markupSuffixes: Set[String]): InputConfig = {
val matcher = docTypeMatcher getOrElse new DefaultDocumentTypeMatcher(markupSuffixes)
val templates = templateParser getOrElse ParseTemplate
val styleSheets = styleSheetParser getOrElse ParseStyleSheet
InputConfig(provider.build(matcher, codec), config, templates, styleSheets, isParallel)
}
}
/** Creates InputConfigBuilder instances for a specific root directory in the file system.
*/
object Directory {
def apply (name: String)(implicit codec: Codec): InputConfigBuilder = apply(new File(name), hiddenFileFilter)(codec)
def apply (name: String, exclude: FileFilter)(implicit codec: Codec): InputConfigBuilder = apply(new File(name), exclude)(codec)
def apply (file: File)(implicit codec: Codec): InputConfigBuilder = apply(file, hiddenFileFilter)(codec)
def apply (file: File, exclude: FileFilter)(implicit codec: Codec): InputConfigBuilder = new InputConfigBuilder(new DirectoryProviderBuilder(Seq(file), exclude), codec)
}
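  /* Usage sketch (illustrative only; the directory and file names are
   * assumptions, the builder methods are the ones defined above):
   *
   * {{{
   * implicit val codec: Codec = scala.io.Codec.UTF8
   * val inputConfig = Directory("docs")
   *   .withConfigFile("default.conf")
   *   .inParallel
   *   .build(markupSuffixes = Set("md"))
   * }}}
   */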
/** Creates InputConfigBuilder instances for several root directories in the file system
* which will be merged into a tree with a single root.
*/
object Directories {
def apply (roots: Seq[File])(implicit codec: Codec): InputConfigBuilder = apply(roots, hiddenFileFilter)(codec)
def apply (roots: Seq[File], exclude: FileFilter)(implicit codec: Codec): InputConfigBuilder = new InputConfigBuilder(new DirectoryProviderBuilder(roots, exclude), codec)
}
/** Creates InputConfigBuilder instances using the current working directory as its root.
*/
object DefaultDirectory {
def apply (exclude: FileFilter = hiddenFileFilter)(implicit codec: Codec): InputConfigBuilder = Directory(System.getProperty("user.dir"), exclude)(codec)
}
}
| amuramatsu/Laika | core/src/main/scala/laika/io/InputProvider.scala | Scala | apache-2.0 | 13,026 |
package com.generativists
package object thirdway {
type RNG = scala.util.Random
}
| generativists/ThirdWay | src/main/scala/com/generativists/thirdway/package.scala | Scala | mit | 86 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.plan.rules.physical.stream
import org.apache.flink.table.plan.`trait`.FlinkRelDistribution
import org.apache.flink.table.plan.nodes.FlinkConventions
import org.apache.flink.table.plan.nodes.logical.FlinkLogicalSort
import org.apache.flink.table.plan.nodes.physical.stream.StreamExecSortLimit
import org.apache.calcite.plan.{RelOptRule, RelOptRuleCall}
import org.apache.calcite.rel.RelNode
import org.apache.calcite.rel.convert.ConverterRule
/**
* Rule that matches [[FlinkLogicalSort]] with non-empty sort fields and non-null fetch or offset,
* and converts it to [[StreamExecSortLimit]].
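 *
 * For illustration (assumed SQL, not taken from this file): a streaming query such as
 *
 * {{{
 *   SELECT * FROM T ORDER BY ts LIMIT 10
 * }}}
 *
 * is planned as a [[FlinkLogicalSort]] with a non-empty collation and a non-null
 * fetch, which this rule rewrites into a [[StreamExecSortLimit]].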
*/
class StreamExecSortLimitRule
extends ConverterRule(
classOf[FlinkLogicalSort],
FlinkConventions.LOGICAL,
FlinkConventions.STREAM_PHYSICAL,
"StreamExecSortLimitRule") {
override def matches(call: RelOptRuleCall): Boolean = {
val sort: FlinkLogicalSort = call.rel(0)
    // only matches Sort with non-empty sort fields and a non-null fetch or offset
!sort.getCollation.getFieldCollations.isEmpty && (sort.fetch != null || sort.offset != null)
}
override def convert(rel: RelNode): RelNode = {
val sort: FlinkLogicalSort = rel.asInstanceOf[FlinkLogicalSort]
val input = sort.getInput
val requiredTraitSet = input.getTraitSet
.replace(FlinkRelDistribution.SINGLETON)
.replace(FlinkConventions.STREAM_PHYSICAL)
val providedTraitSet = sort.getTraitSet
.replace(FlinkRelDistribution.SINGLETON)
.replace(FlinkConventions.STREAM_PHYSICAL)
val newInput: RelNode = RelOptRule.convert(input, requiredTraitSet)
new StreamExecSortLimit(
rel.getCluster,
providedTraitSet,
newInput,
sort.collation,
sort.offset,
sort.fetch)
}
}
object StreamExecSortLimitRule {
val INSTANCE: RelOptRule = new StreamExecSortLimitRule
}
| shaoxuan-wang/flink | flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/plan/rules/physical/stream/StreamExecSortLimitRule.scala | Scala | apache-2.0 | 2,654 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.nn
import com.intel.analytics.bigdl.nn.abstractnn.{Initializable, TensorModule}
import com.intel.analytics.bigdl.tensor.Tensor
import com.intel.analytics.bigdl.tensor.TensorNumericMath.TensorNumeric
import com.intel.analytics.bigdl.utils.RandomGenerator._
import com.intel.analytics.bigdl.utils.{T, Table}
import scala.reflect.ClassTag
/**
* [[Cosine]] calculates the cosine similarity of the input to k mean centers.
* The input given in `forward(input)` must be either
* a vector (1D tensor) or matrix (2D tensor). If the input is a vector, it must
* have the size of `inputSize`. If it is a matrix, then each row is assumed to be
 * an input sample of the given batch (the number of rows is the batch size and
 * the number of columns must equal `inputSize`).
*
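 * For each output unit j the layer computes (as implemented in `updateOutput`
 * below, with 1e-12 added to the norms to guard against division by zero):
 *
 * {{{
 * output(j) = dot(w_j, x) / (norm(w_j) * norm(x))
 * }}}
 *
 * Minimal usage sketch (illustrative sizes only):
 *
 * {{{
 * val layer = Cosine[Float](inputSize = 5, outputSize = 3)
 * val input = Tensor[Float](5).rand()
 * val output = layer.forward(input) // 3 cosine similarities, one per weight row
 * }}}
 *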
* @param inputSize the size of each input sample
* @param outputSize the size of the module output of each sample
*/
@SerialVersionUID(- 8739169489135761430L)
class Cosine[T: ClassTag](val inputSize : Int, val outputSize : Int)(
implicit ev: TensorNumeric[T]) extends TensorModule[T] with Initializable {
val gradWeight = Tensor[T](outputSize, inputSize)
val weight = Tensor[T](outputSize, inputSize)
@transient
var _weightNorm: Tensor[T] = null
@transient
var _inputNorm: Tensor[T] = null
@transient
var __norm: T = ev.fromType(0)
@transient
var _sum: Tensor[T] = null
@transient
var _weight: Tensor[T] = null
@transient
var _gradOutput: Tensor[T] = null
{
val stdv = 1 / math.sqrt(weight.size(1))
val wInit: InitializationMethod = RandomUniform(-stdv, stdv)
setInitMethod(weightInitMethod = wInit)
}
override def reset(): Unit = {
weightInitMethod.init(weight, VariableFormat.OUT_IN)
zeroGradParameters()
}
override def updateOutput(input: Tensor[T]): Tensor[T] = {
require(input.dim() == 1 || input.dim() == 2,
"Cosine: " + ErrorInfo.constrainInputAsVectorOrBatch)
if (null == _weightNorm) _weightNorm = Tensor[T]()
if (null == _inputNorm) _inputNorm = Tensor[T]()
if (null == _sum) _sum = Tensor[T]()
if (null == _weight) _weight = Tensor[T]()
if (null == _gradOutput) _gradOutput = Tensor[T]()
weight.norm(_weightNorm, 2, 2)
_weightNorm.add(ev.fromType(1e-12))
if (input.dim() == 1) {
output.resize(outputSize).zero()
output.addmv(ev.fromType(1), weight, input)
__norm = ev.plus(input.norm(2), ev.fromType(1e-12))
output.cdiv(_weightNorm.view(outputSize)).div(__norm)
} else if (input.dim() == 2) {
val batchSize = input.size(1)
val nElement = output.nElement()
output.resize(batchSize, outputSize)
if (output.nElement() != nElement) output.zero()
output.addmm(ev.fromType(0), output, ev.fromType(1), input, weight.t())
input.norm(_inputNorm, 2, 2)
output.cdiv(_weightNorm.view(1, outputSize).expandAs(output))
output.cdiv(Tensor[T](_inputNorm.storage(), _inputNorm.storageOffset(),
_inputNorm.size(), _inputNorm.stride()).expandAs(output))
}
output
}
override def updateGradInput(input: Tensor[T], gradOutput: Tensor[T]) : Tensor[T] = {
require(input.dim() == 1 || input.dim() == 2,
"Cosine: " + ErrorInfo.constrainInputAsVectorOrBatch)
val nElement = gradInput.nElement()
gradInput.resizeAs(input)
if (gradInput.nElement() != nElement) gradInput.zero()
if (input.dim() == 1) {
_weight.resizeAs(weight).copy(weight)
_weight.cdiv(Tensor[T](_weightNorm.storage(), _weightNorm.storageOffset(),
_weightNorm.size(), _weightNorm.stride()).expandAs(weight))
_weight.div(__norm)
_weight.addr(ev.fromType(1), _weight, ev.divide(ev.fromType(-1),
ev.times(__norm, __norm)), output, input)
gradInput.addmv(ev.fromType(0), ev.fromType(1), _weight.t(), gradOutput)
} else if (input.dim() == 2) {
val inputNorm = _inputNorm.expandAs(input)
val weightNorm = _weightNorm.view(1, outputSize).expandAs(gradOutput)
gradInput.copy(input).cdiv(inputNorm)
_gradOutput.resizeAs(gradOutput).copy(gradOutput)
_gradOutput.cmul(output)
_sum.sum(_gradOutput, 2)
gradInput.cmul(_sum.expandAs(input))
_gradOutput.resizeAs(gradOutput).copy(gradOutput)
_gradOutput.cdiv(weightNorm)
gradInput.addmm(ev.fromType(-1), gradInput, ev.fromType(1), _gradOutput, weight)
gradInput.cdiv(inputNorm)
}
gradInput
}
override def accGradParameters(input: Tensor[T], gradOutput: Tensor[T]): Unit = {
require(input.dim() == 1 || input.dim() == 2,
"Cosine: " + ErrorInfo.constrainInputAsVectorOrBatch)
if (input.dim() == 1 && scaleW != 0) {
_gradOutput.resizeAs(gradOutput).copy(gradOutput)
      val weightNorm = _weightNorm.view(outputSize)
_gradOutput.cdiv(weightNorm)
gradWeight.addr(ev.divide(ev.fromType[Double](scaleW), __norm), _gradOutput, input)
_gradOutput.cdiv(weightNorm)
_gradOutput.cmul(output)
_weight.resizeAs(weight).copy(weight)
_weight.cmul(_gradOutput.view(outputSize, 1).expandAs(weight))
gradWeight.add(ev.fromType[Double](-scaleW), _weight)
} else if (input.dim() == 2) {
_weight.resizeAs(weight).copy(weight)
_gradOutput.resizeAs(gradOutput).copy(gradOutput)
_gradOutput.cmul(output)
_sum.sum(_gradOutput, 1)
val grad = _sum(1)
grad.cdiv(_weightNorm.select(2, 1))
_weight.cmul(grad.view(outputSize, 1).expandAs(_weight))
val input_ = _gradOutput
input_.resizeAs(input).copy(input)
input_.cdiv(_inputNorm.expandAs(input))
_weight.addmm(ev.fromType(-1), _weight, ev.fromType(1), gradOutput.t(), input_)
_weight.cdiv(_weightNorm.expandAs(_weight))
gradWeight.add(_weight)
}
}
override def zeroGradParameters(): Unit = {
gradWeight.zero()
}
override def parameters(): (Array[Tensor[T]], Array[Tensor[T]]) = {
(Array(this.weight), Array(this.gradWeight))
}
override def getParametersTable(): Table = {
T(getName() -> T("weight" -> weight, "gradWeight" -> gradWeight))
}
override def toString(): String = {
s"${getPrintName}($inputSize, $outputSize)"
}
  override def canEqual(other: Any): Boolean = other.isInstanceOf[Cosine[T]]
override def equals(other: Any): Boolean = other match {
case that: Cosine[T] =>
super.equals(that) &&
(that canEqual this) &&
weight == that.weight &&
inputSize == that.inputSize &&
outputSize == that.outputSize
case _ => false
}
override def hashCode(): Int = {
def getHashCode(a: Any): Int = if (a == null) 0 else a.hashCode()
val state = Seq(super.hashCode(), weight, inputSize, outputSize)
state.map(getHashCode).foldLeft(0)((a, b) => 37 * a + b)
}
}
object Cosine {
def apply[@specialized(Float, Double) T: ClassTag](
inputSize : Int,
outputSize : Int)(implicit ev: TensorNumeric[T]) : Cosine[T] = {
new Cosine[T](inputSize, outputSize)
}
}
| JerryYanWan/BigDL | spark/dl/src/main/scala/com/intel/analytics/bigdl/nn/Cosine.scala | Scala | apache-2.0 | 7,666 |
/*
* Copyright (C) 2014 Romain Reuillon
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.openmole.plugin.method.evolution
import cats.implicits._
import monocle.macros.{ GenLens, Lenses }
import org.openmole.core.dsl._
import org.openmole.core.dsl.extension._
import org.openmole.plugin.method.evolution.Genome.Suggestion
import org.openmole.plugin.method.evolution.data._
import squants.time.Time
import monocle._
import monocle.syntax.all._
object NSGA2 {
object DeterministicParams {
import mgo.evolution.algorithm.{ CDGenome, NSGA2 ⇒ MGONSGA2, _ }
implicit def integration: MGOAPI.Integration[DeterministicParams, (Vector[Double], Vector[Int]), Phenotype] = new MGOAPI.Integration[DeterministicParams, (Vector[Double], Vector[Int]), Phenotype] {
type G = CDGenome.Genome
type I = CDGenome.DeterministicIndividual.Individual[Phenotype]
type S = EvolutionState[Unit]
def iManifest = implicitly
def gManifest = implicitly
def sManifest = implicitly
def operations(om: DeterministicParams) = new Ops {
def startTimeLens = GenLens[S](_.startTime)
def generationLens = GenLens[S](_.generation)
def evaluatedLens = GenLens[S](_.evaluated)
def genomeValues(genome: G) = MGOAPI.paired(CDGenome.continuousValues.get _, CDGenome.discreteValues.get _)(genome)
def buildGenome(v: (Vector[Double], Vector[Int])): G = CDGenome.buildGenome(v._1, None, v._2, None)
def buildGenome(vs: Vector[Variable[_]]) = buildGenome(Genome.fromVariables(vs, om.genome))
def buildIndividual(genome: G, phenotype: Phenotype, context: Context) = CDGenome.DeterministicIndividual.buildIndividual(genome, phenotype)
def initialState = EvolutionState[Unit](s = ())
override def metadata(generation: Long, saveOption: SaveOption) =
EvolutionMetadata.NSGA2(
genome = MetadataGeneration.genomeData(om.genome),
objective = om.objectives.map(MetadataGeneration.objectiveData),
populationSize = om.mu,
generation = generation,
saveOption = saveOption
)
def result(population: Vector[I], state: S, keepAll: Boolean, includeOutputs: Boolean) = FromContext { p ⇒
import p._
val res = MGONSGA2.result[Phenotype](population, Genome.continuous(om.genome), Objective.toFitnessFunction(om.phenotypeContent, om.objectives).from(context), keepAll = keepAll)
val genomes = GAIntegration.genomesOfPopulationToVariables(om.genome, res.map(_.continuous) zip res.map(_.discrete), scale = false)
val fitness = GAIntegration.objectivesOfPopulationToVariables(om.objectives, res.map(_.fitness))
val outputsValues = if (includeOutputs) DeterministicGAIntegration.outputValues(om.phenotypeContent, res.map(_.individual.phenotype)) else Seq()
genomes ++ fitness ++ outputsValues
}
def initialGenomes(n: Int, rng: scala.util.Random) = FromContext { p ⇒
import p._
val continuous = Genome.continuous(om.genome)
val discrete = Genome.discrete(om.genome)
val rejectValue = om.reject.map(f ⇒ GAIntegration.rejectValue[G](f, om.genome, _.continuousValues.toVector, _.discreteValues.toVector).from(context))
MGONSGA2.initialGenomes(n, continuous, discrete, rejectValue, rng)
}
def breeding(individuals: Vector[I], n: Int, s: S, rng: scala.util.Random) = FromContext { p ⇒
import p._
val discrete = Genome.discrete(om.genome)
val rejectValue = om.reject.map(f ⇒ GAIntegration.rejectValue[G](f, om.genome, _.continuousValues.toVector, _.discreteValues.toVector).from(context))
MGONSGA2.adaptiveBreeding[S, Phenotype](n, om.operatorExploration, discrete, Objective.toFitnessFunction(om.phenotypeContent, om.objectives).from(context), rejectValue)(s, individuals, rng)
}
def elitism(population: Vector[I], candidates: Vector[I], s: S, evaluated: Long, rng: scala.util.Random) = FromContext { p ⇒
import p._
val (s2, elited) = MGONSGA2.elitism[S, Phenotype](om.mu, Genome.continuous(om.genome), Objective.toFitnessFunction(om.phenotypeContent, om.objectives).from(context))(s, population, candidates, rng)
val s3 = Focus[S](_.generation).modify(_ + 1)(s2)
val s4 = Focus[S](_.evaluated).modify(_ + evaluated)(s3)
(s4, elited)
}
def migrateToIsland(population: Vector[I]) = DeterministicGAIntegration.migrateToIsland(population)
def migrateFromIsland(population: Vector[I], state: S) = DeterministicGAIntegration.migrateFromIsland(population)
def afterEvaluated(g: Long, s: S, population: Vector[I]): Boolean = mgo.evolution.stop.afterEvaluated[S, I](g, Focus[S](_.evaluated))(s, population)
def afterGeneration(g: Long, s: S, population: Vector[I]): Boolean = mgo.evolution.stop.afterGeneration[S, I](g, Focus[S](_.generation))(s, population)
def afterDuration(d: Time, s: S, population: Vector[I]): Boolean = mgo.evolution.stop.afterDuration[S, I](d, Focus[S](_.startTime))(s, population)
}
}
}
case class DeterministicParams(
mu: Int,
genome: Genome,
phenotypeContent: PhenotypeContent,
objectives: Seq[Objective],
operatorExploration: Double,
reject: Option[Condition])
object StochasticParams {
import mgo.evolution.algorithm.{ CDGenome, NoisyNSGA2 ⇒ MGONoisyNSGA2, _ }
implicit def integration = new MGOAPI.Integration[StochasticParams, (Vector[Double], Vector[Int]), Phenotype] {
type G = CDGenome.Genome
type I = CDGenome.NoisyIndividual.Individual[Phenotype]
type S = EvolutionState[Unit]
def iManifest = implicitly[Manifest[I]]
def gManifest = implicitly
def sManifest = implicitly
def operations(om: StochasticParams) = new Ops {
override def metadata(generation: Long, saveOption: SaveOption) =
EvolutionMetadata.StochasticNSGA2(
genome = MetadataGeneration.genomeData(om.genome),
objective = om.objectives.map(MetadataGeneration.objectiveData),
sample = om.historySize,
populationSize = om.mu,
generation = generation,
saveOption = saveOption
)
def startTimeLens = GenLens[S](_.startTime)
def generationLens = GenLens[S](_.generation)
def evaluatedLens = GenLens[S](_.evaluated)
def genomeValues(genome: G) = MGOAPI.paired(CDGenome.continuousValues.get _, CDGenome.discreteValues.get _)(genome)
def buildGenome(v: (Vector[Double], Vector[Int])): G = CDGenome.buildGenome(v._1, None, v._2, None)
def buildGenome(vs: Vector[Variable[_]]) = buildGenome(Genome.fromVariables(vs, om.genome))
def buildIndividual(genome: G, phenotype: Phenotype, context: Context) = CDGenome.NoisyIndividual.buildIndividual(genome, phenotype)
def initialState = EvolutionState[Unit](s = ())
def aggregate = Objective.aggregate(om.phenotypeContent, om.objectives)
def result(population: Vector[I], state: S, keepAll: Boolean, includeOutputs: Boolean) = FromContext { p ⇒
import p._
val res = MGONoisyNSGA2.result(population, aggregate.from(context), Genome.continuous(om.genome), keepAll = keepAll)
val genomes = GAIntegration.genomesOfPopulationToVariables(om.genome, res.map(_.continuous) zip res.map(_.discrete), scale = false)
val fitness = GAIntegration.objectivesOfPopulationToVariables(om.objectives, res.map(_.fitness))
val samples = Variable(GAIntegration.samplesVal.array, res.map(_.replications).toArray)
val outputValues = if (includeOutputs) StochasticGAIntegration.outputValues(om.phenotypeContent, res.map(_.individual.phenotypeHistory)) else Seq()
genomes ++ fitness ++ Seq(samples) ++ outputValues
}
def initialGenomes(n: Int, rng: scala.util.Random) = FromContext { p ⇒
import p._
val continuous = Genome.continuous(om.genome)
val discrete = Genome.discrete(om.genome)
val rejectValue = om.reject.map(f ⇒ GAIntegration.rejectValue[G](f, om.genome, _.continuousValues.toVector, _.discreteValues.toVector).from(context))
MGONoisyNSGA2.initialGenomes(n, continuous, discrete, rejectValue, rng)
}
def breeding(individuals: Vector[I], n: Int, s: S, rng: util.Random) = FromContext { p ⇒
import p._
val discrete = Genome.discrete(om.genome)
val rejectValue = om.reject.map(f ⇒ GAIntegration.rejectValue[G](f, om.genome, _.continuousValues.toVector, _.discreteValues.toVector).from(context))
MGONoisyNSGA2.adaptiveBreeding[S, Phenotype](n, om.operatorExploration, om.cloneProbability, aggregate.from(context), discrete, rejectValue) apply (s, individuals, rng)
}
def elitism(population: Vector[I], candidates: Vector[I], s: S, evaluated: Long, rng: util.Random) = FromContext { p ⇒
import p._
val (s2, elited) = MGONoisyNSGA2.elitism[S, Phenotype](om.mu, om.historySize, aggregate.from(context), Genome.continuous(om.genome)) apply (s, population, candidates, rng)
val s3 = Focus[S](_.generation).modify(_ + 1)(s2)
val s4 = Focus[S](_.evaluated).modify(_ + evaluated)(s3)
(s4, elited)
}
def migrateToIsland(population: Vector[I]) = StochasticGAIntegration.migrateToIsland[I](population, Focus[I](_.historyAge))
def migrateFromIsland(population: Vector[I], state: S) = StochasticGAIntegration.migrateFromIsland[I, Phenotype](population, Focus[I](_.historyAge), Focus[I](_.phenotypeHistory))
def afterEvaluated(g: Long, s: S, population: Vector[I]): Boolean = mgo.evolution.stop.afterEvaluated[S, I](g, Focus[S](_.evaluated))(s, population)
def afterGeneration(g: Long, s: S, population: Vector[I]): Boolean = mgo.evolution.stop.afterGeneration[S, I](g, Focus[S](_.generation))(s, population)
def afterDuration(d: Time, s: S, population: Vector[I]): Boolean = mgo.evolution.stop.afterDuration[S, I](d, Focus[S](_.startTime))(s, population)
}
}
}
case class StochasticParams(
mu: Int,
operatorExploration: Double,
genome: Genome,
phenotypeContent: PhenotypeContent,
objectives: Seq[Objective],
historySize: Int,
cloneProbability: Double,
reject: Option[Condition]
)
def apply[P](
genome: Genome,
objective: Objectives,
outputs: Seq[Val[_]] = Seq(),
populationSize: Int = 200,
stochastic: OptionalArgument[Stochastic] = None,
reject: OptionalArgument[Condition] = None
): EvolutionWorkflow =
EvolutionWorkflow.stochasticity(objective, stochastic.option) match {
case None ⇒
val exactObjectives = Objectives.toExact(objective)
val phenotypeContent = PhenotypeContent(Objectives.prototypes(exactObjectives), outputs)
EvolutionWorkflow.deterministicGAIntegration(
DeterministicParams(populationSize, genome, phenotypeContent, exactObjectives, EvolutionWorkflow.operatorExploration, reject),
genome,
phenotypeContent,
validate = Objectives.validate(exactObjectives, outputs)
)
case Some(stochasticValue) ⇒
val noisyObjectives = Objectives.toNoisy(objective)
val phenotypeContent = PhenotypeContent(Objectives.prototypes(noisyObjectives), outputs)
def validation: Validate = {
val aOutputs = outputs.map(_.toArray)
Objectives.validate(noisyObjectives, aOutputs)
}
EvolutionWorkflow.stochasticGAIntegration(
StochasticParams(populationSize, EvolutionWorkflow.operatorExploration, genome, phenotypeContent, noisyObjectives, stochasticValue.sample, stochasticValue.reevaluate, reject.option),
genome,
phenotypeContent,
stochasticValue,
validate = validation
)
}
}
import EvolutionWorkflow._
object NSGA2Evolution {
import org.openmole.core.dsl.DSL
implicit def method: ExplorationMethod[NSGA2Evolution, EvolutionWorkflow] =
p ⇒
EvolutionPattern.build(
algorithm =
NSGA2(
populationSize = p.populationSize,
genome = p.genome,
objective = p.objective,
outputs = p.evaluation.outputs,
stochastic = p.stochastic,
reject = p.reject
),
evaluation = p.evaluation,
termination = p.termination,
parallelism = p.parallelism,
distribution = p.distribution,
suggestion = p.suggestion(p.genome),
scope = p.scope
)
implicit def patternContainer: ExplorationMethodSetter[NSGA2Evolution, EvolutionPattern] = (e, p) ⇒ e.copy(distribution = p)
}
@Lenses case class NSGA2Evolution(
genome: Genome,
objective: Objectives,
evaluation: DSL,
termination: OMTermination,
populationSize: Int = 200,
stochastic: OptionalArgument[Stochastic] = None,
reject: OptionalArgument[Condition] = None,
parallelism: Int = EvolutionWorkflow.parallelism,
distribution: EvolutionPattern = SteadyState(),
suggestion: Suggestion = Suggestion.empty,
scope: DefinitionScope = "nsga2")
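/* Illustrative DSL usage (hypothetical model and variables; `Val` and the `in`
 * factor syntax come from the OpenMOLE DSL, the argument names follow the
 * NSGA2Evolution case class above):
 *
 * {{{
 * val x = Val[Double]
 * val o = Val[Double]
 *
 * NSGA2Evolution(
 *   genome = Seq(x in (0.0, 1.0)),
 *   objective = Seq(o),
 *   evaluation = model,
 *   termination = 100,
 *   parallelism = 10
 * )
 * }}}
 */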
| openmole/openmole | openmole/plugins/org.openmole.plugin.method.evolution/src/main/scala/org/openmole/plugin/method/evolution/NSGA2.scala | Scala | agpl-3.0 | 14,273 |
// Copyright (C) 2015 Fabio Petroni
// Contact: http://www.fabiopetroni.com
//
// This file is part of CoreScript (a script to convert raw files into libFM-compliant format).
//
// CoreScript is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// CoreScript is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with CoreScript. If not, see <http://www.gnu.org/licenses/>.
//
// Based on the publication:
// - Fabio Petroni, Luciano Del Corro and Rainer Gemulla (2015):
// "CORE: Context-Aware Open Relation Extraction with Factorization Machines".
// EMNLP, 2015.
package evaluation
import java.util.Calendar
import java.text.SimpleDateFormat
import collection.mutable.{HashSet, HashMap, ArrayBuffer}
import io.{Source,StdIn}
import java.io.{File,InputStream,PrintStream,FileInputStream}
import collection.mutable
/**
 * @author Sebastian Riedel and Fabio Petroni
*/
object AnnotationTool {
case class Annotation(tuple: Seq[Any], label: String, correct: Boolean) {
override def toString = (Seq(if (correct) "1" else "0", label) ++ tuple).mkString("\t")
def fact = tuple -> label
}
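  /* Example of the serialized line format produced by `toString` above and
   * consumed by `loadAnnotations` below. Values are illustrative only;
   * fields are tab-separated:
   *
   *   1\temployedBy\tAlice\tAcmeCorp
   *
   * i.e. a correctness flag ("1"/"0"), the relation label, then the tuple.
   */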
def loadAnnotations(in: InputStream, out: Option[PrintStream] = None) = {
println("Reading in annotations...")
val result = new mutable.HashMap[(Seq[Any], String), Annotation]()
for (line <- Source.fromInputStream(in).getLines()) {
val fields = line.split("\\t")
val correct = fields(0) == "1"
val label = fields(1)
val tuple = fields.drop(2).toSeq
result(tuple -> label) = Annotation(tuple, label, correct)
for (o <- out) o.println(line)
}
result
}
}
| fabiopetroni/CORE | EvaluationManager/AnnotationTool.scala | Scala | gpl-3.0 | 2,059 |
/*
* Copyright 2010-2011 WorldWide Conferencing, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.liftweb
package mongodb
import util.{ConnectionIdentifier, DefaultConnectionIdentifier}
import json.JsonAST.JObject
import java.util.UUID
import com.mongodb._
import org.bson.types.ObjectId
/*
* extend case class with this trait
*/
trait MongoDocument[BaseDocument] extends JsonObject[BaseDocument] {
self: BaseDocument =>
def _id: Any
def meta: MongoDocumentMeta[BaseDocument]
def delete {
meta.delete("_id", _id)
}
def save = meta.save(this)
def getRef: Option[MongoRef] = _id match {
case oid: ObjectId => Some(MongoRef(meta.collectionName, oid))
case _ => None
}
}
/*
* extend case class companion objects with this trait
*/
trait MongoDocumentMeta[BaseDocument] extends JsonObjectMeta[BaseDocument] with MongoMeta[BaseDocument] {
/**
* Override this to specify a ConnectionIdentifier.
*/
def connectionIdentifier: ConnectionIdentifier = DefaultConnectionIdentifier
/*
* Use the collection associated with this Meta.
*/
def useColl[T](f: DBCollection => T): T =
MongoDB.useCollection(connectionIdentifier, collectionName)(f)
/*
* Use the db associated with this Meta.
*/
def useDb[T](f: DB => T): T = MongoDB.use(connectionIdentifier)(f)
def create(dbo: DBObject): BaseDocument = {
create(JObjectParser.serialize(dbo).asInstanceOf[JObject])
}
/**
   * Find a single document by a query, using a DBObject.
*/
def find(qry: DBObject): Option[BaseDocument] = {
MongoDB.useCollection(connectionIdentifier, collectionName) ( coll =>
coll.findOne(qry) match {
case null => None
case dbo => {
Some(create(dbo))
}
}
)
}
/**
* Find a single document by _id using a String.
*/
def find(s: String): Option[BaseDocument] =
if (ObjectId.isValid(s))
find(new BasicDBObject("_id", new ObjectId(s)))
else
find(new BasicDBObject("_id", s))
/**
* Find a single document by _id using an ObjectId.
*/
def find(oid: ObjectId): Option[BaseDocument] = find(new BasicDBObject("_id", oid))
/**
* Find a single document by _id using a UUID.
*/
def find(uuid: UUID): Option[BaseDocument] = find(new BasicDBObject("_id", uuid))
/**
   * Find a single document by a query using String, Any inputs
*/
def find(k: String, v: Any): Option[BaseDocument] = find(new BasicDBObject(k, v))
/**
   * Find a single document using a JSON query
*/
def find(json: JObject): Option[BaseDocument] = find(JObjectParser.parse(json))
/**
* Find all documents in this collection
*/
def findAll: List[BaseDocument] = {
import scala.collection.JavaConversions._
MongoDB.useCollection(connectionIdentifier, collectionName)(coll => {
/** Mongo Cursors are both Iterable and Iterator,
* so we need to reduce ambiguity for implicits
*/
(coll.find: Iterator[DBObject]).map(create).toList
})
}
/**
* Find all documents using a DBObject query.
*/
def findAll(qry: DBObject, sort: Option[DBObject], opts: FindOption*): List[BaseDocument] = {
import scala.collection.JavaConversions._
val findOpts = opts.toList
MongoDB.useCollection(connectionIdentifier, collectionName) ( coll => {
val cur = coll.find(qry).limit(
findOpts.find(_.isInstanceOf[Limit]).map(x => x.value).getOrElse(0)
).skip(
findOpts.find(_.isInstanceOf[Skip]).map(x => x.value).getOrElse(0)
)
sort.foreach( s => cur.sort(s))
/** Mongo Cursors are both Iterable and Iterator,
* so we need to reduce ambiguity for implicits
*/
(cur: Iterator[DBObject]).map(create).toList
})
}
/**
* Find all documents using a DBObject query.
*/
def findAll(qry: DBObject, opts: FindOption*): List[BaseDocument] =
findAll(qry, None, opts :_*)
/**
* Find all documents using a DBObject query with sort
*/
def findAll(qry: DBObject, sort: DBObject, opts: FindOption*): List[BaseDocument] =
findAll(qry, Some(sort), opts :_*)
/**
* Find all documents using a JObject query
*/
def findAll(qry: JObject, opts: FindOption*): List[BaseDocument] =
findAll(JObjectParser.parse(qry), None, opts :_*)
/**
* Find all documents using a JObject query with sort
*/
def findAll(qry: JObject, sort: JObject, opts: FindOption*): List[BaseDocument] =
findAll(JObjectParser.parse(qry), Some(JObjectParser.parse(sort)), opts :_*)
/**
* Find all documents using a k, v query
*/
def findAll(k: String, o: Any, opts: FindOption*): List[BaseDocument] =
findAll(new BasicDBObject(k, o), None, opts :_*)
/**
* Find all documents using a k, v query with JObject sort
*/
def findAll(k: String, o: Any, sort: JObject, opts: FindOption*): List[BaseDocument] =
findAll(new BasicDBObject(k, o), Some(JObjectParser.parse(sort)), opts :_*)
/*
* Save a document to the db
*/
def save(in: BaseDocument) {
MongoDB.use(connectionIdentifier) ( db => {
save(in, db)
})
}
/*
* Save a document to the db using the given Mongo instance
*/
def save(in: BaseDocument, db: DB) {
db.getCollection(collectionName).save(JObjectParser.parse(toJObject(in)))
}
/*
* Update document with a JObject query using the given Mongo instance
*/
def update(qry: JObject, newbd: BaseDocument, db: DB, opts: UpdateOption*) {
update(qry, toJObject(newbd), db, opts :_*)
}
/*
* Update document with a JObject query
*/
def update(qry: JObject, newbd: BaseDocument, opts: UpdateOption*) {
MongoDB.use(connectionIdentifier) ( db => {
update(qry, newbd, db, opts :_*)
})
}
}
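/* Usage sketch (hypothetical case class; a String `_id` keeps the example free
 * of custom serializers):
 *
 * {{{
 * case class Person(_id: String, name: String) extends MongoDocument[Person] {
 *   def meta = Person
 * }
 *
 * object Person extends MongoDocumentMeta[Person] {
 *   override def collectionName = "persons"
 * }
 *
 * Person.findAll("name", "Alice") // all persons named Alice
 * }}}
 */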
| lzpfmh/framework-2 | persistence/mongodb/src/main/scala/net/liftweb/mongodb/MongoDocument.scala | Scala | apache-2.0 | 6,241 |
package com.snapswap.telesign
object EnumUseCaseCodes extends Enumeration {
type UseCaseCode = Value
/**
* Prevent account takeover/compromise
*/
val ATCK = Value("ATCK")
/**
* Prevent bulk account creation and fraud
*/
val BACF = Value("BACF")
/**
* Prevent bulk account creation and spam
*/
val BACS = Value("BACS")
/**
* Prevent chargebacks
*/
val CHBK = Value("CHBK")
/**
* Calendar Event
*/
val CLDR = Value("CLDR")
/**
* Prevent false lead entry
*/
val LEAD = Value("LEAD")
/**
* Other
*/
val OTHR = Value("OTHR")
/**
* Password reset
*/
val PWRT = Value("PWRT")
/**
* Prevent fake/missed reservations
*/
val RESV = Value("RESV")
/**
* Prevent prescription fraud
*/
val RXPF = Value("RXPF")
/**
* Shipping Notification
*/
val SHIP = Value("SHIP")
/**
* Prevent telecom fraud
*/
val TELF = Value("TELF")
/**
* Prevent identity theft
*/
val THEF = Value("THEF")
/**
* Transaction Verification
*/
val TRVF = Value("TRVF")
/**
   * Unknown/prefer not to say
*/
val UNKN = Value("UNKN")
}
| snap-swap/telesign-api-client | src/main/scala/com/snapswap/telesign/EnumUseCaseCodes.scala | Scala | mit | 1,168 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.hibench.sparkbench.micro
import com.intel.hibench.sparkbench.common.IOCommon
import org.apache.spark._
import org.apache.spark.rdd.RDD
import scala.reflect.ClassTag
object ScalaSort{
implicit def rddToHashedRDDFunctions[K : Ordering : ClassTag, V: ClassTag]
(rdd: RDD[(K, V)]) = new ConfigurableOrderedRDDFunctions[K, V, (K, V)](rdd)
def main(args: Array[String]){
if (args.length != 2){
System.err.println(
s"Usage: $ScalaSort <INPUT_HDFS> <OUTPUT_HDFS>"
)
System.exit(1)
}
val sparkConf = new SparkConf().setAppName("ScalaSort")
val sc = new SparkContext(sparkConf)
val parallel = sc.getConf.getInt("spark.default.parallelism", sc.defaultParallelism)
val reducer = IOCommon.getProperty("hibench.default.shuffle.parallelism")
.getOrElse((parallel / 2).toString).toInt
val io = new IOCommon(sc)
val data = io.load[String](args(0)).map((_, 1))
val partitioner = new HashPartitioner(partitions = reducer)
val sorted = data.sortByKeyWithPartitioner(partitioner = partitioner).map(_._1)
io.save(args(1), sorted)
sc.stop()
}
}
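// Example invocation (illustrative; the jar name and HDFS paths are assumptions):
//
//   spark-submit --class com.intel.hibench.sparkbench.micro.ScalaSort \
//     sparkbench-assembly.jar hdfs:///HiBench/Sort/Input hdfs:///HiBench/Sort/Output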
| nareshgundla/HiBench | sparkbench/micro/src/main/scala/com/intel/sparkbench/micro/ScalaSort.scala | Scala | apache-2.0 | 1,947 |
package utils.helpers
import models.domain._
import controllers.mappings.Mappings._
import play.api.i18n.{MMessages, MessagesApi, Lang}
import play.api.Play.current
case class PastPresentLabelHelper(implicit claim: Claim, lang: Lang)
object PastPresentLabelHelper {
val messagesApi: MessagesApi = current.injector.instanceOf[MMessages]
def labelForSelfEmployment(implicit claim: Claim, lang: Lang, labelKey: String) = {
messagesApi(isSelfEmployed(claim) match {
case true => labelKey + ".present"
case false => labelKey + ".past"
})
}
def valuesForSelfEmployment(implicit claim: Claim, lang: Lang, pastYes: String, pastNo: String, presentYes: String, presentNo: String) = {
isSelfEmployed(claim) match {
case true => 'values -> Seq("yes" -> messagesApi(presentYes), "no" -> messagesApi(presentNo))
case false => 'values -> Seq("yes" -> messagesApi(pastYes), "no" -> messagesApi(pastNo))
}
}
private def isSelfEmployed(claim: Claim) = claim.questionGroup(SelfEmploymentDates) match {
case Some(a: SelfEmploymentDates) => a.stillSelfEmployed == "yes"
case _ => false
}
def labelForEmployment(finishedThisJob:String, lang: Lang, labelKey: String) = {
messagesApi(finishedThisJob == yes match {
case true => labelKey + ".past"
case false => labelKey + ".present"
})
}
def labelForEmployment(implicit claim: Claim, lang: Lang, labelKey: String, jobID: String) = {
messagesApi(isTheJobFinished(claim, jobID) match {
case true => labelKey + ".past"
case false => labelKey + ".present"
})
}
def valuesForEmployment(implicit claim: Claim, lang: Lang, pastYes: String, pastNo: String, presentYes: String, presentNo: String, jobID: String) = {
isTheJobFinished(claim, jobID) match {
case false => 'values -> Seq("yes" -> messagesApi(presentYes), "no" -> messagesApi(presentNo))
case true => 'values -> Seq("yes" -> messagesApi(pastYes), "no" -> messagesApi(pastNo))
}
}
private def isTheJobFinished(claim: Claim, jobID: String) = theJobs(claim).questionGroup(jobID, JobDetails) match {
case Some(j: JobDetails) => j.finishedThisJob == yes
case _ => false
}
private def theJobs(implicit claim: Claim) = claim.questionGroup(Jobs) match {
case Some(js: Jobs) => js
case _ => Jobs()
}
}
| Department-for-Work-and-Pensions/ClaimCapture | c3/app/utils/helpers/PastPresentLabelHelper.scala | Scala | mit | 2,339 |
package ch02
class Ex24Uncurry {
def uncurry[A,B,C](f: A => B => C): (A,B) => C = (a: A, b: B) => f(a)(b)
}
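// Usage example (illustrative): uncurrying a curried addition function.
//
//   val add: Int => Int => Int = a => b => a + b
//   val add2: (Int, Int) => Int = new Ex24Uncurry().uncurry(add)
//   add2(1, 2) // == 3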
| IrfanAnsari/fpinscala | src/main/scala/ch02/Ex24Uncurry.scala | Scala | mit | 113 |
package com.twitter.finatra.tests.json.internal.caseclass.validation.domain
import com.twitter.finatra.validation.{MethodValidation, NotEmpty, ValidationResult}
case class Address(
@NotEmpty street: Option[String] = None,
@NotEmpty city: String,
@NotEmpty state: String) {
@MethodValidation
def validateState = {
ValidationResult(
state == "CA" || state == "MD" || state == "WI",
"state must be one of [CA, MD, WI]")
}
}
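// Illustrative behaviour (hypothetical values): Address(city = "San Jose", state = "CA")
// passes validateState, while state = "NY" fails with "state must be one of [CA, MD, WI]".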
| nkhuyu/finatra | jackson/src/test/scala/com/twitter/finatra/tests/json/internal/caseclass/validation/domain/Address.scala | Scala | apache-2.0 | 452 |
package editor.model
import scala.collection.mutable._
class EditableGameObject(var name: String = "object", var parent: String = "root", var kind: String = "thing") {
var article = "a"
var description: String = "This object has no description."
val adjectives: Set[String] = Set.empty
val nouns: Set[String] = Set.empty
val actions: Set[String] = Set.empty
val properties: Set[String] = Set.empty
val values: Map[String, Int] = Map.empty
val objects: Map[String, String] = Map.empty
val relations: Map[String, Set[String]] = Map.empty
val messages: Map[String, String] = Map.empty
val responses: ListBuffer[EditableCondResp] = ListBuffer.empty
}
| gregwk/clay-pot | Game_Editor/src/editor/model/EditableGameObject.scala | Scala | mit | 694 |
package org.jetbrains.plugins.scala
package lang
package psi
package api
package expr
import com.intellij.psi.{PsiAnnotationMemberValue, PsiElement}
import org.jetbrains.plugins.scala.lang.parser.ScalaElementTypes
import org.jetbrains.plugins.scala.lang.psi.api.base.ScConstructor
import org.jetbrains.plugins.scala.lang.psi.impl.expr.ScNameValuePairImpl
/**
* @author Alexander Podkhalyuzin
* Date: 07.03.2008
*/
trait ScAnnotationExpr extends ScalaPsiElement {
def constr = findChildByClassScala(classOf[ScConstructor])
def getAttributes: Seq[ScNameValuePair] = {
val constr = findChildByClassScala(classOf[ScConstructor])
if (constr == null) return Seq.empty
val args = constr.findFirstChildByType(ScalaElementTypes.ARG_EXPRS)
if (args == null) return Seq.empty
args.asInstanceOf[ScArgumentExprList].findChildrenByType(ScalaElementTypes.ASSIGN_STMT) map {
case stmt: ScAssignStmt => new ScNameValueAssignment(stmt)
}
}
private class ScNameValueAssignment(assign: ScAssignStmt) extends ScNameValuePairImpl(assign.getNode) {
override def nameId: PsiElement = assign.getLExpression
override def getValue: PsiAnnotationMemberValue = (assign.getRExpression map {
case annotationMember: PsiAnnotationMemberValue => annotationMember
case _ => null
}).orNull
}
}
| triggerNZ/intellij-scala | src/org/jetbrains/plugins/scala/lang/psi/api/expr/ScAnnotationExpr.scala | Scala | apache-2.0 | 1,329 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.server.epoch
import java.util.{Map => JMap}
import kafka.server.KafkaConfig._
import kafka.server.{BlockingSend, KafkaServer, ReplicaFetcherBlockingSend}
import kafka.utils.TestUtils._
import kafka.utils.{Logging, TestUtils}
import kafka.zk.ZooKeeperTestHarness
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerRecord}
import org.apache.kafka.common.metrics.Metrics
import org.apache.kafka.common.protocol.Errors._
import org.apache.kafka.common.requests.EpochEndOffset._
import org.apache.kafka.common.serialization.StringSerializer
import org.apache.kafka.common.utils.{LogContext, SystemTime}
import org.apache.kafka.common.TopicPartition
import org.apache.kafka.common.protocol.ApiKeys
import org.junit.Assert._
import org.junit.{After, Test}
import org.apache.kafka.common.requests.{EpochEndOffset, OffsetsForLeaderEpochRequest, OffsetsForLeaderEpochResponse}
import scala.collection.JavaConverters._
import scala.collection.Map
class LeaderEpochIntegrationTest extends ZooKeeperTestHarness with Logging {
var brokers: Seq[KafkaServer] = null
val topic1 = "foo"
val topic2 = "bar"
val t1p0 = new TopicPartition(topic1, 0)
val t1p1 = new TopicPartition(topic1, 1)
val t1p2 = new TopicPartition(topic1, 2)
val t2p0 = new TopicPartition(topic2, 0)
val t2p2 = new TopicPartition(topic2, 2)
val tp = t1p0
var producer: KafkaProducer[Array[Byte], Array[Byte]] = null
@After
override def tearDown() {
if (producer != null)
producer.close()
TestUtils.shutdownServers(brokers)
super.tearDown()
}
@Test
def shouldAddCurrentLeaderEpochToMessagesAsTheyAreWrittenToLeader() {
brokers = (0 to 1).map { id => createServer(fromProps(createBrokerConfig(id, zkConnect))) }
// Given two topics with replication of a single partition
for (topic <- List(topic1, topic2)) {
createTopic(zkClient, topic, Map(0 -> Seq(0, 1)), servers = brokers)
}
// When we send four messages
sendFourMessagesToEachTopic()
//Then they should be stamped with Leader Epoch 0
var expectedLeaderEpoch = 0
waitUntilTrue(() => messagesHaveLeaderEpoch(brokers(0), expectedLeaderEpoch, 0), "Leader epoch should be 0")
//Given we then bounce the leader
brokers(0).shutdown()
brokers(0).startup()
//Then LeaderEpoch should now have changed from 0 -> 1
expectedLeaderEpoch = 1
waitForEpochChangeTo(topic1, 0, expectedLeaderEpoch)
waitForEpochChangeTo(topic2, 0, expectedLeaderEpoch)
//Given we now send messages
sendFourMessagesToEachTopic()
//The new messages should be stamped with LeaderEpoch = 1
waitUntilTrue(() => messagesHaveLeaderEpoch(brokers(0), expectedLeaderEpoch, 4), "Leader epoch should be 1")
}
@Test
def shouldSendLeaderEpochRequestAndGetAResponse(): Unit = {
//3 brokers, put partition on 100/101 and then pretend to be 102
brokers = (100 to 102).map { id => createServer(fromProps(createBrokerConfig(id, zkConnect))) }
val assignment1 = Map(0 -> Seq(100), 1 -> Seq(101))
TestUtils.createTopic(zkClient, topic1, assignment1, brokers)
val assignment2 = Map(0 -> Seq(100))
TestUtils.createTopic(zkClient, topic2, assignment2, brokers)
    //Send 10 messages to t1p0, 20 to t1p1 and 30 to t2p0
producer = createProducer(getBrokerListStrFromServers(brokers), retries = 5, acks = -1)
(0 until 10).foreach { _ =>
producer.send(new ProducerRecord(topic1, 0, null, "IHeartLogs".getBytes))
}
(0 until 20).foreach { _ =>
producer.send(new ProducerRecord(topic1, 1, null, "OhAreThey".getBytes))
}
(0 until 30).foreach { _ =>
producer.send(new ProducerRecord(topic2, 0, null, "IReallyDo".getBytes))
}
producer.flush()
val fetcher0 = new TestFetcherThread(sender(from = brokers(2), to = brokers(0)))
val epochsRequested = Map(t1p0 -> 0, t1p1 -> 0, t2p0 -> 0, t2p2 -> 0)
//When
val offsetsForEpochs = fetcher0.leaderOffsetsFor(epochsRequested)
//Then end offset should be correct
assertEquals(10, offsetsForEpochs(t1p0).endOffset)
assertEquals(30, offsetsForEpochs(t2p0).endOffset)
//And should get no leader for partition error from t1p1 (as it's not on broker 0)
assertTrue(offsetsForEpochs(t1p1).hasError)
assertEquals(UNKNOWN_TOPIC_OR_PARTITION, offsetsForEpochs(t1p1).error)
assertEquals(UNDEFINED_EPOCH_OFFSET, offsetsForEpochs(t1p1).endOffset)
//Repointing to broker 1 we should get the correct offset for t1p1
val fetcher1 = new TestFetcherThread(sender(from = brokers(2), to = brokers(1)))
val offsetsForEpochs1 = fetcher1.leaderOffsetsFor(epochsRequested)
assertEquals(20, offsetsForEpochs1(t1p1).endOffset)
}
@Test
def shouldIncreaseLeaderEpochBetweenLeaderRestarts(): Unit = {
//Setup: we are only interested in the single partition on broker 101
brokers = Seq(100, 101).map { id => createServer(fromProps(createBrokerConfig(id, zkConnect))) }
def leo() = brokers(1).replicaManager.getReplica(tp).get.logEndOffset.messageOffset
TestUtils.createTopic(zkClient, tp.topic, Map(tp.partition -> Seq(101)), brokers)
producer = createProducer(getBrokerListStrFromServers(brokers), retries = 10, acks = -1)
//1. Given a single message
producer.send(new ProducerRecord(tp.topic, tp.partition, null, "IHeartLogs".getBytes)).get
var fetcher = new TestFetcherThread(sender(brokers(0), brokers(1)))
//Then epoch should be 0 and leo: 1
var offset = fetcher.leaderOffsetsFor(Map(tp -> 0))(tp).endOffset()
assertEquals(1, offset)
assertEquals(leo(), offset)
//2. When broker is bounced
brokers(1).shutdown()
brokers(1).startup()
producer.send(new ProducerRecord(tp.topic, tp.partition, null, "IHeartLogs".getBytes)).get
fetcher = new TestFetcherThread(sender(brokers(0), brokers(1)))
//Then epoch 0 should still be the start offset of epoch 1
offset = fetcher.leaderOffsetsFor(Map(tp -> 0))(tp).endOffset()
assertEquals(1, offset)
    //Then epoch 2 should be the leo (NB: the leader epoch goes up in increments of 2 - this is because we have to first change the leader to -1 and then change it again to the live replica)
assertEquals(2, fetcher.leaderOffsetsFor(Map(tp -> 2))(tp).endOffset())
assertEquals(leo(), fetcher.leaderOffsetsFor(Map(tp -> 2))(tp).endOffset())
//3. When broker is bounced again
brokers(1).shutdown()
brokers(1).startup()
producer.send(new ProducerRecord(tp.topic, tp.partition, null, "IHeartLogs".getBytes)).get
fetcher = new TestFetcherThread(sender(brokers(0), brokers(1)))
//Then Epoch 0 should still map to offset 1
assertEquals(1, fetcher.leaderOffsetsFor(Map(tp -> 0))(tp).endOffset())
//Then Epoch 2 should still map to offset 2
assertEquals(2, fetcher.leaderOffsetsFor(Map(tp -> 2))(tp).endOffset())
    //Then Epoch 4 should map to offset 3 (the log end offset)
assertEquals(3, fetcher.leaderOffsetsFor(Map(tp -> 4))(tp).endOffset())
assertEquals(leo(), fetcher.leaderOffsetsFor(Map(tp -> 4))(tp).endOffset())
//Adding some extra assertions here to save test setup.
shouldSupportRequestsForEpochsNotOnTheLeader(fetcher)
}
//Appended onto the previous test to save on setup cost.
def shouldSupportRequestsForEpochsNotOnTheLeader(fetcher: TestFetcherThread): Unit = {
/**
* Asking for an epoch not present on the leader should return the
* next matching epoch, unless there isn't any, which should return
* undefined.
*/
val epoch1 = Map(t1p0 -> 1)
assertEquals(1, fetcher.leaderOffsetsFor(epoch1)(t1p0).endOffset())
val epoch3 = Map(t1p0 -> 3)
assertEquals(2, fetcher.leaderOffsetsFor(epoch3)(t1p0).endOffset())
val epoch5 = Map(t1p0 -> 5)
assertEquals(-1, fetcher.leaderOffsetsFor(epoch5)(t1p0).endOffset())
}
private def sender(from: KafkaServer, to: KafkaServer): BlockingSend = {
val endPoint = from.metadataCache.getAliveBrokers.find(_.id == to.config.brokerId).get.brokerEndPoint(from.config.interBrokerListenerName)
new ReplicaFetcherBlockingSend(endPoint, from.config, new Metrics(), new SystemTime(), 42, "TestFetcher", new LogContext())
}
private def waitForEpochChangeTo(topic: String, partition: Int, epoch: Int): Unit = {
TestUtils.waitUntilTrue(() => {
brokers(0).metadataCache.getPartitionInfo(topic, partition) match {
case Some(m) => m.basePartitionState.leaderEpoch == epoch
case None => false
}
}, "Epoch didn't change")
}
private def messagesHaveLeaderEpoch(broker: KafkaServer, expectedLeaderEpoch: Int, minOffset: Int): Boolean = {
var result = true
for (topic <- List(topic1, topic2)) {
val tp = new TopicPartition(topic, 0)
val leo = broker.getLogManager().getLog(tp).get.logEndOffset
result = result && leo > 0 && brokers.forall { broker =>
broker.getLogManager().getLog(tp).get.logSegments.iterator.forall { segment =>
          val read = segment.read(minOffset, None, Integer.MAX_VALUE)
          if (read == null) {
            false
          } else {
            read.records.batches().iterator().asScala.forall(
              expectedLeaderEpoch == _.partitionLeaderEpoch()
            )
          }
}
}
}
result
}
private def sendFourMessagesToEachTopic() = {
val testMessageList1 = List("test1", "test2", "test3", "test4")
val testMessageList2 = List("test5", "test6", "test7", "test8")
val producer = TestUtils.createProducer(TestUtils.getBrokerListStrFromServers(brokers), retries = 5, keySerializer = new StringSerializer, valueSerializer = new StringSerializer)
val records =
testMessageList1.map(m => new ProducerRecord(topic1, m, m)) ++
testMessageList2.map(m => new ProducerRecord(topic2, m, m))
records.map(producer.send).foreach(_.get)
producer.close()
}
/**
* Simulates how the Replica Fetcher Thread requests leader offsets for epochs
*/
private[epoch] class TestFetcherThread(sender: BlockingSend) extends Logging {
def leaderOffsetsFor(partitions: Map[TopicPartition, Int]): Map[TopicPartition, EpochEndOffset] = {
val request = new OffsetsForLeaderEpochRequest.Builder(ApiKeys.OFFSET_FOR_LEADER_EPOCH.latestVersion(), toJavaFormat(partitions))
val response = sender.sendRequest(request)
response.responseBody.asInstanceOf[OffsetsForLeaderEpochResponse].responses.asScala
}
def toJavaFormat(partitions: Map[TopicPartition, Int]): JMap[TopicPartition, Integer] = {
partitions.map { case (tp, epoch) => tp -> epoch.asInstanceOf[Integer] }.toMap.asJava
}
}
}
| richhaase/kafka | core/src/test/scala/unit/kafka/server/epoch/LeaderEpochIntegrationTest.scala | Scala | apache-2.0 | 11,510 |
/***********************************************************************
* Copyright (c) 2013-2022 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.arrow.io
import java.io.{ByteArrayInputStream, ByteArrayOutputStream}
import org.apache.arrow.vector.ipc.message.IpcOption
import org.junit.runner.RunWith
import org.locationtech.geomesa.arrow.vector.SimpleFeatureVector.SimpleFeatureEncoding
import org.locationtech.geomesa.features.ScalaSimpleFeature
import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes
import org.locationtech.geomesa.utils.io.WithClose
import org.specs2.mutable.Specification
import org.specs2.runner.JUnitRunner
@RunWith(classOf[JUnitRunner])
class DictionaryBuildingWriterTest extends Specification {
val sft = SimpleFeatureTypes.createType("test", "name:String,dtg:Date,*geom:Point:srid=4326")
val features = (0 until 10).map { i =>
ScalaSimpleFeature.create(sft, s"0$i", s"name0${i % 2}", s"2017-03-15T00:0$i:00.000Z", s"POINT (4$i 5$i)")
}
val ipcOpts = new IpcOption() // TODO test legacy opts
"SimpleFeatureVector" should {
"dynamically encode dictionary values" >> {
val out = new ByteArrayOutputStream()
WithClose(new DictionaryBuildingWriter(sft, Seq("name"), SimpleFeatureEncoding.Max, ipcOpts)) { writer =>
features.foreach(writer.add)
writer.encode(out)
}
WithClose(SimpleFeatureArrowFileReader.streaming(() => new ByteArrayInputStream(out.toByteArray))) { reader =>
reader.dictionaries must haveSize(1)
reader.dictionaries.get("name") must beSome
reader.dictionaries("name").iterator.toSeq must containTheSameElementsAs(Seq("name00", "name01"))
WithClose(reader.features())(f => f.map(ScalaSimpleFeature.copy).toSeq mustEqual features)
}
}
}
}
| locationtech/geomesa | geomesa-arrow/geomesa-arrow-gt/src/test/scala/org/locationtech/geomesa/arrow/io/DictionaryBuildingWriterTest.scala | Scala | apache-2.0 | 2,148 |
package cromwell
import java.nio.file.{Files, Paths}
import java.util.UUID
import akka.testkit._
import wdl4s.NamespaceWithWorkflow
import wdl4s.expression.NoFunctions
import wdl4s.types.{WdlArrayType, WdlFileType, WdlStringType}
import wdl4s.values.{WdlArray, WdlFile, WdlInteger, WdlString}
import cromwell.util.SampleWdl
import scala.language.postfixOps
class ArrayWorkflowSpec extends CromwellTestkitSpec {
val tmpDir = Files.createTempDirectory("ArrayWorkflowSpec")
val ns = NamespaceWithWorkflow.load(SampleWdl.ArrayLiteral(tmpDir).wdlSource(""))
val expectedArray = WdlArray(WdlArrayType(WdlFileType), Seq(WdlFile("f1"), WdlFile("f2"), WdlFile("f3")))
"A task which contains a parameter " should {
"accept an array for the value" in {
runWdlAndAssertOutputs(
sampleWdl = SampleWdl.ArrayIO,
EventFilter.info(pattern = s"starting calls: wf.concat, wf.find, wf.serialize", occurrences = 1),
expectedOutputs = Map(
"wf.count_lines.count" -> WdlInteger(3),
"wf.count_lines_array.count" -> WdlInteger(3),
"wf.serialize.contents" -> WdlString("str1\\nstr2\\nstr3")
)
)
}
}
"A static Array[File] declaration" should {
"be a valid declaration" in {
val declaration = ns.workflow.declarations.find {_.name == "arr"}.getOrElse {
fail("Expected declaration 'arr' to be found")
}
val expression = declaration.expression.getOrElse {
fail("Expected an expression for declaration 'arr'")
}
val value = expression.evaluate((s:String) => fail("No lookups"), NoFunctions).getOrElse {
fail("Expected expression for 'arr' to evaluate")
}
value shouldEqual WdlArray(WdlArrayType(WdlStringType), Seq(WdlString("f1"), WdlString("f2"), WdlString("f3")))
}
"be usable as an input" in {
val catTask = ns.findTask("cat").getOrElse {
fail("Expected to find task 'cat'")
}
val command = catTask.instantiateCommand(Map("files" -> expectedArray), NoFunctions).getOrElse {
fail("Expected instantiation to work")
}
command shouldEqual "cat -s f1 f2 f3"
}
"Coerce Array[String] to Array[File] when running the workflow" in {
val outputs = Map(
"wf.cat.lines" -> WdlArray(WdlArrayType(WdlStringType), Seq(
WdlString("line1"),
WdlString("line2"),
WdlString("line3"),
WdlString("line4"),
WdlString("line5")
)
)
)
val uuid = UUID.randomUUID()
val sampleWdl = SampleWdl.ArrayLiteral(Paths.get("."))
runWdlAndAssertOutputs(
sampleWdl,
eventFilter = EventFilter.info(pattern = s"starting calls: wf.cat", occurrences = 1),
expectedOutputs = outputs
)
sampleWdl.cleanup()
}
}
}
| dgtester/cromwell | src/test/scala/cromwell/ArrayWorkflowSpec.scala | Scala | bsd-3-clause | 2,829 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.columnar
import java.nio.{ByteOrder, ByteBuffer}
import org.apache.spark.sql.catalyst.expressions.MutableRow
/**
 * Adds null-value support to a [[ColumnAccessor]]. The underlying buffer starts with a nulls
 * header of the form `[null count: Int][row ordinal of each null: Int ...]`, followed by the
 * actual column data; `extractTo` consults that header to decide between `setNullAt` and
 * delegating to the wrapped accessor.
 */
private[sql] trait NullableColumnAccessor extends ColumnAccessor {
private var nullsBuffer: ByteBuffer = _
private var nullCount: Int = _
private var seenNulls: Int = 0
private var nextNullIndex: Int = _
private var pos: Int = 0
abstract override protected def initialize(): Unit = {
nullsBuffer = underlyingBuffer.duplicate().order(ByteOrder.nativeOrder())
nullCount = ByteBufferHelper.getInt(nullsBuffer)
nextNullIndex = if (nullCount > 0) ByteBufferHelper.getInt(nullsBuffer) else -1
pos = 0
    // Skip the nulls header (a 4-byte count plus one 4-byte ordinal per null) to reach the column data.
    underlyingBuffer.position(underlyingBuffer.position + 4 + nullCount * 4)
super.initialize()
}
abstract override def extractTo(row: MutableRow, ordinal: Int): Unit = {
if (pos == nextNullIndex) {
seenNulls += 1
if (seenNulls < nullCount) {
nextNullIndex = ByteBufferHelper.getInt(nullsBuffer)
}
row.setNullAt(ordinal)
} else {
super.extractTo(row, ordinal)
}
pos += 1
}
abstract override def hasNext: Boolean = seenNulls < nullCount || super.hasNext
}
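// Layout sketch (added for illustration): the nulls header consumed above is
//   [null count: Int][ordinal of 1st null row: Int]...[ordinal of Nth null row: Int][column data...]
// A minimal standalone encoder for such a header, assuming native byte order.
private[sql] object NullHeaderSketch {
  def encode(nullOrdinals: Seq[Int]): ByteBuffer = {
    val buf = ByteBuffer.allocate(4 + 4 * nullOrdinals.size).order(ByteOrder.nativeOrder())
    buf.putInt(nullOrdinals.size)         // null count
    nullOrdinals.foreach(buf.putInt(_))   // ascending row ordinals of the nulls
    buf.flip()
    buf
  }
}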
| pronix/spark | sql/core/src/main/scala/org/apache/spark/sql/columnar/NullableColumnAccessor.scala | Scala | apache-2.0 | 1,995 |
package com.robin.kafka
/**
* Created by robinmac on 15-7-20.
*/
object CreatingTopicScript extends App {
val baseString="/usr/lib/kafka_2.11-0.8.2.1/bin/kafka-topics.sh --zookeeper 10.20.73.186:2181,10.20.73.187:2181,10.20.73.188:2181,10.20.73.189:2181,10.20.73.190:2181 --create --partitions 1 --replication-factor 2 --topic topictest";
for(i<-1 to 300)
println(baseString+i)
}
| mengyou0304/scala_simple_work | src/main/scala/com/robin/kafka/CreatingTopic.scala | Scala | apache-2.0 | 395 |
/*
* The MIT License (MIT)
*
* Copyright (c) 2014 Ryan C. Brozo
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package com.ryanbrozo.spray.hawk
import com.ryanbrozo.spray.hawk.HawkError._
import com.typesafe.config.ConfigFactory
import com.typesafe.scalalogging.StrictLogging
import spray.http.HttpHeaders._
import spray.http._
import spray.routing.RequestContext
import spray.routing.authentication.ContextAuthenticator
import scala.concurrent._
import scala.concurrent.duration._
import scala.language.{implicitConversions, postfixOps}
import scala.util._
object HawkAuthenticator {
private val _conf = ConfigFactory.load()
private val _payloadValidationEnabled = _conf.getBoolean("spray.hawk.payloadValidation")
private val _timeSkewValidationEnabled = _conf.getBoolean("spray.hawk.timeSkewValidation")
private val _timeSkewInSeconds = _conf.getLong("spray.hawk.timeSkewInSeconds")
private val _maxUserRetrieverTimeInSeconds = _conf.getLong("spray.hawk.maxUserRetrieverTimeInSeconds") seconds
def apply[U <: HawkUser](realm: String, userRetriever: UserRetriever[U])(implicit executionContext: ExecutionContext) =
new HawkAuthenticator(Util.defaultTimestampProvider, Util.defaultNonceValidator)(realm, userRetriever)
def apply[U <: HawkUser](tsProvider: TimeStampProvider)(realm: String,userRetriever: UserRetriever[U])
(implicit executionContext: ExecutionContext) =
new HawkAuthenticator(tsProvider, Util.defaultNonceValidator)(realm, userRetriever)
def apply[U <: HawkUser](nonceValidator: NonceValidator)(realm: String,userRetriever: UserRetriever[U])
(implicit executionContext: ExecutionContext) =
new HawkAuthenticator(Util.defaultTimestampProvider, nonceValidator)(realm, userRetriever)
def apply[U <: HawkUser](tsProvider: TimeStampProvider, nonceValidator: NonceValidator)(realm: String,userRetriever: UserRetriever[U])
(implicit executionContext: ExecutionContext) =
new HawkAuthenticator(tsProvider, nonceValidator)(realm, userRetriever)
}
/**
* A `ContextAuthenticator` passed to Spray that validates the credentials passed via the HTTP `Authorization` header
* using the Hawk Authentication protocol to authenticate the user and extract a user object.
*
* Example usage:
*
* {{{
* // Our User model. This needs to extend the HawkUser trait for our UserCredentialsRetriever
* // to work
* case class User(name: String, key: String, algorithm: HawkHashAlgorithms) extends HawkUser
*
* // Our user credentials retriever. Currently it returns 'Bob' along with his hawk credentials
* val userCredentialsRetriever: UserRetriever[User] = { id =>
* Future.successful {
* if (id == "dh37fgj492je") Some(User("Bob", "werxhqb98rpaxn39848xrunpaw3489ruxnpa98w4rxn", HawkHashAlgorithms.HawkSHA256))
* else None
* }
* }
*
* val hawkAuthenticator = HawkAuthenticator("hawk-test", userCredentialsRetriever)
*
* startServer(interface = "localhost", port = 8080) {
* path("secured") {
* authenticate(hawkAuthenticator) { user =>
* get {
* complete {
* s"Welcome to spray, \\${user.name}!"
* }
* } ~
* post {
* entity(as[String]) { body =>
* complete {
* s"Welcome to spray, \\${user.name}! Your post body was: \\$body"
* }
* }
* }
* }
* }
* }
* }}}
*
*/
class HawkAuthenticator[U <: HawkUser](timestampProvider: TimeStampProvider, nonceValidator: NonceValidator)(realm: String,
userRetriever: UserRetriever[U])
(implicit val executionContext: ExecutionContext)
extends ContextAuthenticator[U]
with StrictLogging {
import HawkAuthenticator._
val SCHEME = HEADER_NAME
def apply(ctx: RequestContext) = {
val hawkRequest = HawkRequest(ctx.request)
authenticate(hawkRequest) map {
case Right(u) => Right(u)
case Left(e) => Left(HawkRejection(e, getChallengeHeaders(e)))
}
}
/**
* Checks if given bewit credentials are valid
*
* @param hawkUserOption Hawk user, wrapped in an Option
* @param hawkRequest HawkRequest instance
* @return Either a HawkError or the validated HawkUser
*/
private def validateBewitCredentials(hawkUserOption: Option[U], hawkRequest: HawkRequest): Either[HawkError, U] = {
def checkMethod(implicit hawkUser: U): Either[HawkError, U] = {
if (hawkRequest.request.method != HttpMethods.GET)
Left(InvalidMacError)
else
Right(hawkUser)
}
def checkExpiry(implicit hawkUser: U): Either[HawkError, U] = {
val currentTimestamp = timestampProvider()
if (hawkRequest.bewitAttributes.exp * 1000 <= currentTimestamp)
Left(AccessExpiredError)
else
Right(hawkUser)
}
def checkMac(implicit hawkUser: U): Either[HawkError, U] = {
if (hawkRequest.bewitAttributes.mac != Hawk(hawkUser, hawkRequest.bewitOptions, Hawk.TYPE_BEWIT).mac)
Left(InvalidMacError)
else
Right(hawkUser)
}
hawkUserOption map { implicit hawkUser =>
for {
methodOk <- checkMethod.right
expiryOk <- checkExpiry.right
macOk <- checkMac.right
} yield expiryOk
} getOrElse Left(InvalidCredentialsError)
}
/**
* Checks if given Authorization header is valid
*
* @param hawkUserOption Hawk user, wrapped in an Option
* @param hawkRequest HawkRequest instance
* @return Either a HawkError or the validated HawkUser
*/
private def validateAuthHeaderCredentials(hawkUserOption: Option[U], hawkRequest: HawkRequest): Either[HawkError, U] = {
def checkMac(implicit hawkUser: U): Either[HawkError, U] = {
(for {
mac <- hawkRequest.authHeaderAttributes.mac if mac == Hawk(hawkUser, hawkRequest.hawkOptions, Hawk.TYPE_HEADER).mac
} yield Right(hawkUser))
.getOrElse(Left(InvalidMacError))
}
def checkPayload(implicit hawkUser: U): Either[HawkError, U] = {
hawkRequest.authHeaderAttributes.hash match {
case Some(hash) if _payloadValidationEnabled =>
(for {
(payload, contentType) <- hawkRequest.payload
hawkPayload <- Option(HawkPayload(payload, contentType, hawkUser.algorithm.hashAlgo))
if hawkPayload.hash == hash
} yield Right(hawkUser))
.getOrElse(Left(InvalidPayloadHashError))
case _ =>
          // If 'hash' is not supplied, no payload validation is needed;
          // return the obtained credentials.
Right(hawkUser)
}
}
def checkNonce(implicit hawkUser: U): Either[HawkError, U] = {
hawkRequest.authHeaderAttributes.nonce match {
case Some(n) if nonceValidator(n, hawkUser.key, hawkRequest.authHeaderAttributes.ts) => Right(hawkUser)
case _ => Left(InvalidNonceError)
}
}
def checkTimestamp(implicit hawkUser: U): Either[HawkError, U] = {
if (_timeSkewValidationEnabled) {
val timestamp = hawkRequest.authHeaderAttributes.ts
val currentTimestamp = timestampProvider()
val lowerBound = currentTimestamp - _timeSkewInSeconds
val upperBound = currentTimestamp + _timeSkewInSeconds
if (lowerBound <= timestamp && timestamp <= upperBound)
Right(hawkUser)
else
Left(StaleTimestampError(hawkUser))
}
else Right(hawkUser)
}
hawkUserOption map { implicit hawkUser =>
for {
macOk <- checkMac.right
        // According to the Hawk specs, payload validation should only
        // happen if the MAC is validated.
payloadOk <- checkPayload.right
nonceOk <- checkNonce.right
tsOk <- checkTimestamp.right
} yield tsOk
} getOrElse Left(InvalidCredentialsError)
}
/**
* Produces a list of Http Challenge Headers
*
* @param hawkError HawkError used to produce the challenge headers
* @return List of challenge headers
*/
private def getChallengeHeaders(hawkError: HawkError): List[HttpHeader] = {
val params = hawkError match {
case err: StaleTimestampError =>
val currentTimestamp = timestampProvider()
Map(
"ts" -> currentTimestamp.toString,
"tsm" -> HawkTimestamp(currentTimestamp, err.hawkUser).mac,
"error" -> err.message
)
case err =>
Map(
"error" -> err.message
)
}
`WWW-Authenticate`(HttpChallenge(SCHEME, realm, params)) :: Nil
}
/**
* Authenticates an incoming request. This method checks if Hawk credentials came from bewit or Authorization header
* and validates accordingly
*
* @param hawkRequest HawkRequest instance to validate.
* @return Either a HawkError, if authorization is not valid, or a HawkUser if authorization is valid. Result is wrapped
* in a Future
*/
private def authenticate(hawkRequest: HawkRequest): Future[Either[HawkError, U]] = {
def validate(id: String, validateFunc: (Option[U], HawkRequest) => Either[HawkError, U]): Future[Either[HawkError, U]] = {
val userTry = Try {
// Assume the supplied userRetriever function can throw an exception
userRetriever(id)
}
userTry match {
case Success(userFuture) =>
userFuture map { validateFunc(_, hawkRequest) }
case Failure(e) =>
logger.warn(s"An error occurred while retrieving a hawk user: ${e.getMessage}")
Future.successful(Left(UserRetrievalError(e)))
}
}
// Determine whether to use bewit parameter or Authorization header
// Request should not have both
if (hawkRequest.hasBewit && hawkRequest.hasAuthorizationHeader) {
Future.successful(Left(MultipleAuthenticationError))
}
else {
// Ensure bewit is valid
if (hawkRequest.hasBewit) {
if (hawkRequest.bewitAttributes.isInvalid.isDefined) {
Future.successful(Left(hawkRequest.bewitAttributes.isInvalid.get))
}
else validate(hawkRequest.bewitAttributes.id, validateBewitCredentials)
}
else {
if (!hawkRequest.authHeaderAttributes.isPresent) {
Future.successful(Left(CredentialsMissingError))
}
else validate(hawkRequest.authHeaderAttributes.id, validateAuthHeaderCredentials)
}
}
}
}
| ryanbrozo/spray-hawk | lib/src/main/scala/com/ryanbrozo/spray/hawk/HawkAuthenticator.scala | Scala | mit | 11,437 |
package blended.akka
object BlendedAkkaConstants {
val osgiFacadePath = "OSGI"
val configLocatorPath = "ConfigLocator"
}
| lefou/blended | blended.akka/src/main/scala/blended/akka/BlendedAkkaConstants.scala | Scala | apache-2.0 | 136 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.plan.stream.sql.join
import org.apache.flink.api.scala._
import org.apache.flink.table.api._
import org.apache.flink.table.planner.utils.{StreamTableTestUtil, TableTestBase}
import org.hamcrest.Matchers.containsString
import org.junit.Test
import java.sql.Timestamp
class TemporalFunctionJoinTest extends TableTestBase {
val util: StreamTableTestUtil = streamTestUtil()
util.addDataStream[(Long, String)](
"Orders", 'o_amount, 'o_currency, 'o_rowtime.rowtime)
private val ratesHistory = util.addDataStream[(String, Int, Timestamp)](
"RatesHistory", 'currency, 'rate, 'rowtime.rowtime)
util.addFunction(
"Rates",
ratesHistory.createTemporalTableFunction($"rowtime", $"currency"))
util.addDataStream[(Long, String)](
"ProctimeOrders", 'o_amount, 'o_currency, 'o_proctime.proctime)
private val proctimeRatesHistory = util.addDataStream[(String, Int)](
"ProctimeRatesHistory", 'currency, 'rate, 'proctime.proctime)
util.addFunction(
"ProctimeRates",
proctimeRatesHistory.createTemporalTableFunction($"proctime", $"currency"))
@Test
def testSimpleJoin(): Unit = {
val sqlQuery = "SELECT " +
"o_amount * rate as rate " +
"FROM Orders AS o, " +
"LATERAL TABLE (Rates(o.o_rowtime)) AS r " +
"WHERE currency = o_currency"
util.verifyExecPlan(sqlQuery)
}
@Test
def testSimpleProctimeJoin(): Unit = {
val sqlQuery = "SELECT " +
"o_amount * rate as rate " +
"FROM ProctimeOrders AS o, " +
"LATERAL TABLE (ProctimeRates(o.o_proctime)) AS r " +
"WHERE currency = o_currency"
util.verifyExecPlan(sqlQuery)
}
@Test
def testJoinOnQueryLeft(): Unit = {
val orders = util.tableEnv.sqlQuery("SELECT * FROM Orders WHERE o_amount > 1000")
util.tableEnv.createTemporaryView("Orders2", orders)
val sqlQuery = "SELECT " +
"o_amount * rate as rate " +
"FROM Orders2 AS o, " +
"LATERAL TABLE (Rates(o.o_rowtime)) AS r " +
"WHERE currency = o_currency"
util.verifyExecPlan(sqlQuery)
}
  /**
   * Tests versioned joins with a more complicated query.
   * The important aspects here are the complex OR join condition
   * and the presence of columns that are unused and therefore pruned.
   */
@Test
def testComplexJoin(): Unit = {
val util = streamTestUtil()
util.addDataStream[(String, Int)]("Table3", 't3_comment, 't3_secondary_key)
util.addDataStream[(Timestamp, String, Long, String, Int)](
"Orders", 'o_rowtime.rowtime, 'o_comment, 'o_amount, 'o_currency, 'o_secondary_key)
util.addDataStream[(Timestamp, String, String, Int, Int)](
"RatesHistory", 'rowtime.rowtime, 'comment, 'currency, 'rate, 'secondary_key)
val rates = util.tableEnv
.sqlQuery("SELECT * FROM RatesHistory WHERE rate > 110")
.createTemporalTableFunction($"rowtime", $"currency")
util.addTemporarySystemFunction("Rates", rates)
val sqlQuery =
"SELECT * FROM " +
"(SELECT " +
"o_amount * rate as rate, " +
"secondary_key as secondary_key " +
"FROM Orders AS o, " +
"LATERAL TABLE (Rates(o_rowtime)) AS r " +
"WHERE currency = o_currency AND (rate > 120 OR secondary_key = o_secondary_key)), " +
"Table3 " +
"WHERE t3_secondary_key = secondary_key"
util.verifyExecPlan(sqlQuery)
}
@Test
def testUncorrelatedJoin(): Unit = {
expectedException.expect(classOf[TableException])
expectedException.expectMessage(containsString("Cannot generate a valid execution plan"))
val sqlQuery = "SELECT " +
"o_amount * rate as rate " +
"FROM Orders AS o, " +
"LATERAL TABLE (Rates(TIMESTAMP '2016-06-27 10:10:42.123')) AS r " +
"WHERE currency = o_currency"
util.verifyExplain(sqlQuery)
}
@Test
def testTemporalTableFunctionScan(): Unit = {
expectedException.expect(classOf[TableException])
expectedException.expectMessage(containsString("Cannot generate a valid execution plan"))
val sqlQuery = "SELECT * FROM LATERAL TABLE (Rates(TIMESTAMP '2016-06-27 10:10:42.123'))"
util.verifyExplain(sqlQuery)
}
}
| kl0u/flink | flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/sql/join/TemporalFunctionJoinTest.scala | Scala | apache-2.0 | 4,979 |
package org.scalawiki.query
import org.scalawiki.dto.Page
import org.scalawiki.dto.cmd.Action
import org.scalawiki.dto.cmd.query.prop.{CategoryInfo, Prop}
import org.scalawiki.dto.cmd.query.{Query, TitlesParam}
import org.scalawiki.util.{HttpStub, MockBotSpec}
import org.specs2.mutable.Specification
import spray.util.pimpFuture
class ListCategoryMembersSpec extends Specification with MockBotSpec {
"get category members with continue" should {
"return category members" in {
val queryType = "categorymembers"
val response1 =
"""{ "query":
| { "categorymembers": [{ "pageid": 569559, "ns": 1, "title": "Talk:Welfare reform" }] },
| "continue": { "continue": "-||", "cmcontinue": "10|Stub|6674690" }}""".stripMargin
val response2 =
"""{ "limits": {"categorymembers": 500}, "query":
| { "categorymembers": [{"pageid": 4571809, "ns": 2, "title": "User:Formator"}] }}""".stripMargin
val commands = Seq(
HttpStub(Map("action" -> "query", "list" -> queryType, "cmlimit" -> "max",
"cmtitle" -> "Category:SomeCategory", "cmnamespace" -> "", "continue" -> ""), response1),
HttpStub(Map("action" -> "query", "list" -> queryType, "cmlimit" -> "max",
"cmtitle" -> "Category:SomeCategory", "cmnamespace" -> "",
"continue" -> "-||", "cmcontinue" -> "10|Stub|6674690"), response2)
)
val bot = getBot(commands: _*)
val result = bot.page("Category:SomeCategory").categoryMembers().await
result must have size 2
result(0) === Page(569559, Some(1), "Talk:Welfare reform")
result(1) === Page(4571809, Some(2), "User:Formator")
}
}
"get category number" should {
"return category number for 3 entries and missing entry" in {
val queryType = "categorymembers"
val response1 =
"""{
| "batchcomplete": "",
| "query": {
| "pages": {
| "-1": {
| "ns": 0,
| "title": "NoSuchTitle",
| "missing": ""
| },
| "736": {
| "pageid": 736,
| "ns": 0,
| "title": "Albert Einstein"
| },
| "50177636": {
| "pageid": 50177636,
| "ns": 14,
| "title": "Category:Foo",
| "categoryinfo": {
| "size": 5,
| "pages": 3,
| "files": 2,
| "subcats": 0
| }
| },
| "3108204": {
| "pageid": 3108204,
| "ns": 14,
| "title": "Category:Infobox templates",
| "categoryinfo": {
| "size": 29,
| "pages": 15,
| "files": 0,
| "subcats": 14
| }
| }
| }
| }
|}""".stripMargin
val commands = Seq(
HttpStub(Map("action" -> "query", "prop" -> "categoryinfo",
"titles" -> "Albert Einstein|Category:Foo|Category:Infobox_templates|NoSuchTitle", "continue" -> ""), response1)
)
val bot = getBot(commands: _*)
val query = Action(Query(TitlesParam(Seq("Albert Einstein", "Category:Foo", "Category:Infobox_templates", "NoSuchTitle")),
Prop(CategoryInfo)))
val result = bot.run(query).await
result must have size 4
result(0) === new Page(None, Some(0), "NoSuchTitle", missing = true)
result(1) === Page(736, Some(0), "Albert Einstein")
result(2) === Page(50177636, Some(14), "Category:Foo").copy(
categoryInfo = Some(org.scalawiki.dto.CategoryInfo(5, 3, 2, 0))
)
result(3) === Page(3108204, Some(14), "Category:Infobox templates").copy(
categoryInfo = Some(org.scalawiki.dto.CategoryInfo(29, 15, 0, 14))
)
}
}
}
| intracer/scalawiki | scalawiki-core/src/test/scala/org/scalawiki/query/ListCategoryMembersSpec.scala | Scala | apache-2.0 | 4,249 |
package skutek.abstraction.internals.aux
import scala.annotation.implicitNotFound
import skutek.abstraction.!!
@implicitNotFound(msg =
"Can't safely apply the partial handler, because some of computation's effects would leak. "+
"It is required that caller manually computes set difference between computation's effects and handler's effects. "+
"The resulting set is then passed as explicit type parameter to `handle[_]` or `handleWith[_]` methods. "+
"Compiler can only detect, if the resulting set is correct."+
"\\nIn this particular case, the set of effects requested by the computation is:"+
"\\n ${V}"+
"\\nAnd the set of effects to remain after application of the handler is:"+
"\\n ${U}"
)
//// asserts U = V \\ W
sealed trait CanHandle[U, V, W] {
def apply[A](eff: A !! V): A !! W with U
}
object CanHandle {
private[abstraction] val singleton = new CanHandle[Any, Any, Any] {
def apply[A](eff: A !! Any): A !! Any = eff
}
}
trait CanHandleExports {
implicit def CanHandle_evidence[U, V, W](implicit ev: (W with U) <:< V): CanHandle[U, V, W] =
CanHandle.singleton.asInstanceOf[CanHandle[U, V, W]]
}
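// Type-level illustration (added; Fx1 and Fx2 are placeholder effect types):
// the evidence resolves exactly when U = V \ W, e.g. Fx1 = (Fx1 with Fx2) \ Fx2.
private object CanHandleIllustration extends CanHandleExports {
  trait Fx1
  trait Fx2
  // Compiles because (Fx2 with Fx1) <:< (Fx1 with Fx2).
  implicitly[CanHandle[Fx1, Fx1 with Fx2, Fx2]]
}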
| marcinzh/skutek | modules/core/src/main/scala/skutek/abstraction/internals/aux/CanHandle.scala | Scala | mit | 1,146 |
package databench.runner
import java.io.FileWriter
import java.io.File
import java.text.SimpleDateFormat
import java.util.Date
import java.io.PrintWriter
import databench.task.Generator
trait Reporter {
def report(turn: TurnSummary): TurnSummary
}
class CSVReporter(val tasksGenerators: Seq[Generator]) extends Reporter {
private val benchmarkDateTime =
new SimpleDateFormat("yyyy-MM-dd-HH-mm-ss").format(new Date())
private val writer =
createWriter("benchmark-" + benchmarkDateTime + ".csv")
private val failureLogWriter =
createWriter("benchmark-" + benchmarkDateTime + "-failures.log")
private val possibleTasksResultsNames =
tasksGenerators.map(_.possibleResultsNames)
.flatten.sortBy(name => name)
writeHeader
def report(turn: TurnSummary) = {
val tasksResults = possibleTasksResultsNames.map(turn.numberOfResultsByName.getOrElse(_, 0))
val line = Seq(turn.bankName, turn.vms, turn.threadsPerVM, turn.tps, turn.failures) ++ tasksResults
write(writer)(line: _*)
writer.flush
writeFailuresLog(turn)
turn
}
private def writeFailuresLog(turn: TurnSummary) = {
val grouped = turn.exceptions.groupBy(ex => Option(ex.getMessage).getOrElse("NO MESSAGE"))
for ((message, group) <- grouped) {
write(failureLogWriter)(s"${turn.bankName}: ${group.size} exceptions with message $message")
group.head.printStackTrace(new PrintWriter(failureLogWriter))
}
failureLogWriter.flush()
}
private def writeHeader = {
val header = Seq("Subject", "VMs", "Threads/VM", "TPS", "Failures") ++ possibleTasksResultsNames
write(writer)(header: _*)
writer.flush
}
private def write(writer: FileWriter)(cells: Any*) =
writer.write(cells.map(_.toString).mkString(";") + "\\n")
private def createResultsFolderIfNecessary = {
val folder = new File("results")
if (!folder.exists)
folder.mkdir
}
private def createWriter(name: String) =
new FileWriter(createFile(name))
private def createFile(name: String) = {
createResultsFolderIfNecessary
val file =
new File("results/" + name)
require(file.createNewFile)
file
}
} | databench/databench | databench-runner/src/main/scala/databench/runner/Reporter.scala | Scala | lgpl-2.1 | 2,333 |
// Copyright: 2010 - 2017 https://github.com/ensime/ensime-server/graphs
// License: http://www.gnu.org/licenses/gpl-3.0.en.html
package org.ensime.core
import org.ensime.api._
import org.slf4j.LoggerFactory
import scala.collection.mutable.ListBuffer
class ImplicitAnalyzer(val global: RichPresentationCompiler) {
import global._
class ImplicitsTraverser(p: Position) extends Traverser {
val log = LoggerFactory.getLogger(getClass)
val implicits = new ListBuffer[ImplicitInfo]
override def traverse(t: Tree): Unit = {
val treeP = t.pos
if (p.overlaps(treeP) || treeP.includes(p)) {
try {
t match {
case t: ApplyImplicitView =>
implicits.append(
ImplicitConversionInfo(
treeP.startOrCursor,
treeP.endOrCursor,
SymbolInfo(t.fun.symbol)
)
)
case t: ApplyToImplicitArgs =>
val funIsImplicit = t.fun match {
case tt: ApplyImplicitView => true
case _ => false
}
implicits.append(
ImplicitParamInfo(
treeP.startOrCursor,
treeP.endOrCursor,
SymbolInfo(t.fun.symbol),
t.args.map { a =>
SymbolInfo(a.symbol)
},
funIsImplicit
)
)
case _ =>
}
} catch {
case e: Throwable =>
log.error("Error in AST traverse:", e)
}
super.traverse(t)
}
}
}
def implicitDetails(p: Position): List[ImplicitInfo] = {
val typed = new global.Response[global.Tree]
// AskLoadedTyped below doesn't wait, since this code should run in the pres. compiler thread.
global.askLoadedTyped(p.source, keepLoaded = true, typed)
typed.get.left.toOption match {
case Some(tree) =>
val traverser = new ImplicitsTraverser(p)
traverser.traverse(tree)
traverser.implicits.toList
case _ => List.empty
}
}
}
| ensime/ensime-server | core/src/main/scala/org/ensime/core/ImplicitAnalyzer.scala | Scala | gpl-3.0 | 2,160 |
/* sbt -- Simple Build Tool
* Copyright 2008, 2009, 2010 Mark Harrah
*/
package sbt
import java.io.File
import scala.xml.NodeSeq
final class IvyPaths(val baseDirectory: File, val ivyHome: Option[File])
{
def withBase(newBaseDirectory: File) = new IvyPaths(newBaseDirectory, ivyHome)
}
sealed trait IvyConfiguration
{
type This <: IvyConfiguration
def lock: Option[xsbti.GlobalLock]
def baseDirectory: File
def log: Logger
def withBase(newBaseDirectory: File): This
}
final class InlineIvyConfiguration(val paths: IvyPaths, val resolvers: Seq[Resolver], val otherResolvers: Seq[Resolver],
val moduleConfigurations: Seq[ModuleConfiguration], val localOnly: Boolean, val lock: Option[xsbti.GlobalLock],
val checksums: Seq[String], val log: Logger) extends IvyConfiguration
{
type This = InlineIvyConfiguration
def baseDirectory = paths.baseDirectory
def withBase(newBase: File) = new InlineIvyConfiguration(paths.withBase(newBase), resolvers, otherResolvers, moduleConfigurations, localOnly, lock, checksums, log)
def changeResolvers(newResolvers: Seq[Resolver]) = new InlineIvyConfiguration(paths, newResolvers, otherResolvers, moduleConfigurations, localOnly, lock, checksums, log)
}
final class ExternalIvyConfiguration(val baseDirectory: File, val file: File, val lock: Option[xsbti.GlobalLock], val log: Logger) extends IvyConfiguration
{
type This = ExternalIvyConfiguration
def withBase(newBase: File) = new ExternalIvyConfiguration(newBase, file, lock, log)
}
object IvyConfiguration
{
/** Called to configure Ivy when inline resolvers are not specified.
* This will configure Ivy with an 'ivy-settings.xml' file if there is one or else use default resolvers.*/
def apply(paths: IvyPaths, lock: Option[xsbti.GlobalLock], localOnly: Boolean, checksums: Seq[String], log: Logger): IvyConfiguration =
{
log.debug("Autodetecting configuration.")
val defaultIvyConfigFile = IvySbt.defaultIvyConfiguration(paths.baseDirectory)
if(defaultIvyConfigFile.canRead)
new ExternalIvyConfiguration(paths.baseDirectory, defaultIvyConfigFile, lock, log)
else
new InlineIvyConfiguration(paths, Resolver.withDefaultResolvers(Nil), Nil, Nil, localOnly, lock, checksums, log)
}
}
sealed trait ModuleSettings
{
def validate: Boolean
def ivyScala: Option[IvyScala]
def noScala: ModuleSettings
}
final case class IvyFileConfiguration(file: File, ivyScala: Option[IvyScala], validate: Boolean) extends ModuleSettings
{
def noScala = copy(ivyScala = None)
}
final case class PomConfiguration(file: File, ivyScala: Option[IvyScala], validate: Boolean) extends ModuleSettings
{
def noScala = copy(ivyScala = None)
}
final case class InlineConfiguration(module: ModuleID, moduleInfo: ModuleInfo, dependencies: Seq[ModuleID], ivyXML: NodeSeq = NodeSeq.Empty, configurations: Seq[Configuration] = Nil, defaultConfiguration: Option[Configuration] = None, ivyScala: Option[IvyScala] = None, validate: Boolean = false) extends ModuleSettings
{
def withConfigurations(configurations: Seq[Configuration]) = copy(configurations = configurations)
def noScala = copy(ivyScala = None)
}
final case class EmptyConfiguration(module: ModuleID, moduleInfo: ModuleInfo, ivyScala: Option[IvyScala], validate: Boolean) extends ModuleSettings
{
def noScala = copy(ivyScala = None)
}
object InlineConfiguration
{
def configurations(explicitConfigurations: Iterable[Configuration], defaultConfiguration: Option[Configuration]) =
if(explicitConfigurations.isEmpty)
{
defaultConfiguration match
{
case Some(Configurations.DefaultIvyConfiguration) => Configurations.Default :: Nil
case Some(Configurations.DefaultMavenConfiguration) => Configurations.defaultMavenConfigurations
case _ => Nil
}
}
else
explicitConfigurations
}
object ModuleSettings
{
def apply(ivyScala: Option[IvyScala], validate: Boolean, module: => ModuleID, moduleInfo: => ModuleInfo)(baseDirectory: File, log: Logger): ModuleSettings =
{
log.debug("Autodetecting dependencies.")
val defaultPOMFile = IvySbt.defaultPOM(baseDirectory)
if(defaultPOMFile.canRead)
new PomConfiguration(defaultPOMFile, ivyScala, validate)
else
{
val defaultIvy = IvySbt.defaultIvyFile(baseDirectory)
if(defaultIvy.canRead)
new IvyFileConfiguration(defaultIvy, ivyScala, validate)
else
{
log.warn("No dependency configuration found, using defaults.")
new EmptyConfiguration(module, moduleInfo, ivyScala, validate)
}
}
}
}
| kuochaoyi/xsbt | ivy/IvyConfigurations.scala | Scala | bsd-3-clause | 4,449 |
package com.twitter.finagle.http2
import com.twitter.finagle
import com.twitter.finagle.http.{AbstractHttp1EndToEndTest, Request, Response}
import com.twitter.finagle.Service
import com.twitter.util.Future
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
/**
* This is really a HTTP/1.x test suite because the server only speaks HTTP/1.x
*/
@RunWith(classOf[JUnitRunner])
class ClientFailUpgradeTest extends AbstractHttp1EndToEndTest {
def implName: String = "http/2 client, http/1.1 server"
def clientImpl(): finagle.Http.Client =
finagle.Http.client.configuredParams(Http2)
def serverImpl(): finagle.Http.Server =
finagle.Http.server
def featureImplemented(feature: Feature): Boolean = true
test("Upgrade counters are not incremented") {
val client = nonStreamingConnect(Service.mk { req: Request =>
Future.value(Response())
})
await(client(Request("/")))
assert(!statsRecv.counters.contains(Seq("server", "upgrade", "success")))
assert(!statsRecv.counters.contains(Seq("client", "upgrade", "success")))
await(client.close())
}
}
| spockz/finagle | finagle-http2/src/test/scala/com/twitter/finagle/http2/ClientFailUpgradeTest.scala | Scala | apache-2.0 | 1,114 |
package com.github.jlprat.ninetynine.p02
/**
* Created by @jlprat on 29/02/2016.
*
* P02: Find the penultimate element of a list (last but one)
* Difficulty: *
*/
object P02 {
/**
* This function should return the penultimate element of a list
* @param list the list we want to retrieve the element from
* @tparam A the type of the list
* @return the penultimate element in the list
*/
final def penultimate[A](list: List[A]): A = ???
/**
* This function should return the penultimate element of a list
* @param list the list we want to retrieve the element from
* @tparam A the type of the list
* @return the Option value containing the penultimate element in the list if there is a penultimate
*/
final def penultimateOption[A](list: List[A]): Option[A] = ???
/**
* This is a way to extend classes, in this case, we add new functionality to the already existing Lists
* @param list the type we want to extend
* @tparam A the type of the list, here no restriction
*/
implicit class ListOps[A](list: List[A]) {
/**
* This function should return the penultimate element of a list
* @return the Option value containing the penultimate element in the list if there is a penultimate
*/
def penultimate: Option[A] = ???
}
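  // Reference sketch (added for illustration; the exercise stubs above are
  // intentionally left as ???): one tail-recursive way to solve P02.
  @scala.annotation.tailrec
  private def penultimateSketch[A](list: List[A]): Option[A] = list match {
    case a :: _ :: Nil => Some(a)
    case _ :: tail     => penultimateSketch(tail)
    case Nil           => None
  }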
}
| jlprat/99-scala-problems | src/main/scala/com/github/jlprat/ninetynine/p02/P02.scala | Scala | apache-2.0 | 1,330 |
package com.spike.giantdataanalysis.spark.example.mllib
/**
* <pre>
* CoordinateMatrix的示例
*
* 在sbt shell中运行:run vector.DistributedCoordinateMatrixExample
*
* @author zhoujiagen
*/
object DistributedCoordinateMatrixExample extends App {
  // Obtain the Spark context
import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
val conf = new SparkConf().setMaster("local").setAppName("Spark MLlib Example")
val sc = new SparkContext(conf)
  // Mock input data
import org.apache.spark.rdd.RDD
val rawData: Array[Double] = Array(1, 2, 3, 4, 5)
val rawRDDData: RDD[Double] = sc.parallelize(rawData)
import org.apache.spark.mllib.linalg.distributed.{ CoordinateMatrix, MatrixEntry }
  // Build the matrix entries.
  // Each entry is a tuple of (i: Long, j: Long, value: Double), where i is the row index, j is the column index, and value is the entry value.
  // MatrixEntry is a wrapper over (Long, Long, Double); here the raw values are placed on the diagonal.
  val entries: RDD[MatrixEntry] = rawRDDData.zipWithIndex.map {
    case (data, index) => MatrixEntry(index, index, data)
  }
val mat: CoordinateMatrix = new CoordinateMatrix(entries)
// Get its size.
val m = mat.numRows()
val n = mat.numCols()
println(m)
println(n)
  // The same data can also be exposed as an IndexedRowMatrix.
  val indexedRowMatrix = mat.toIndexedRowMatrix()
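  // Illustration (added): the entries can be collected to the driver for
  // inspection, and the context stopped once the example is done.
  mat.entries.collect().foreach(println)
  sc.stop()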
} | zhoujiagen/giant-data-analysis | data-management-infrastructure/scala-infrastructure-apache-spark/src/main/scala/com/spike/giantdataanalysis/spark/example/mllib/DistributedCoordinateMatrixExample.scala | Scala | mit | 1,306 |
package edu.indiana.ise.spidal.pca
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.mllib.feature.PCA
import org.apache.spark.mllib.linalg.Vectors
import org.apache.spark.mllib.regression.{LabeledPoint, LinearRegressionWithSGD}
/**
* Created by vibhatha on 7/16/17.
*/
object DemoPCA {
def main(args: Array[String]) {
println("Spark PCA")
val conf = new SparkConf().setMaster("local").setAppName("My App")
val sc = new SparkContext(conf)
val data = sc.textFile("file:/home/vibhatha/ds/data/pca/matrix2").map { line =>
      LabeledPoint(1, Vectors.dense(line.split(' ').map(_.toDouble)))
}.cache()
val splits = data.randomSplit(Array(0.8, 0.2), seed = 11L)
val training = splits(0).cache()
val test = splits(1)
val pca = new PCA(training.first().features.size / 2).fit(data.map(_.features))
val training_pca = training.map(p => p.copy(features = pca.transform(p.features)))
val test_pca = test.map(p => p.copy(features = pca.transform(p.features)))
    val arr1: Array[LabeledPoint] = training_pca.collect()
    arr1.foreach(println)
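    // Illustration (added): the PCA-reduced features can feed a linear model;
    // 100 iterations is an arbitrary example value.
    val model = LinearRegressionWithSGD.train(training_pca, 100)
    println(s"Model weights: ${model.weights}")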
}
}
| vibhatha/Spark | SparkMlibBenchmark/src/main/scala/edu/indiana/ise/spidal/pca/DemoPCA.scala | Scala | mit | 1,199 |
/*
* Copyright 2015 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.computations.calculations
import uk.gov.hmrc.ct.CATO01
import uk.gov.hmrc.ct.box.CtTypeConverters
import uk.gov.hmrc.ct.computations.{CP43, CP502, CP509, CP510}
trait NonTradeIncomeCalculator extends CtTypeConverters {
def nonTradeIncomeCalculation(cp43: CP43,
cp502: CP502,
cp509: CP509,
cp510: CP510): CATO01 = {
CATO01(cp43 + cp502 + cp509 + cp510)
}
}
| keithhall/ct-calculations | src/main/scala/uk/gov/hmrc/ct/computations/calculations/NonTradeIncomeCalculator.scala | Scala | apache-2.0 | 1,091 |
package org.gark87.yajom.base
import org.gark87.yajom.annotations.{ReturnOnNull, PredicateToFactory}
import scala.reflect.ClassTag
class YajomCollection[T <: java.lang.Object](val source: java.util.Collection[T]) {
  /** Returns the first element, or creates, appends and returns a new `A` if the collection is empty. */
  def any[A <: T : ClassTag](): T = {
val it = source.iterator()
if (it.hasNext)
it.next
else {
val newValue = implicitly[ClassTag[A]].runtimeClass.newInstance().asInstanceOf[A]
source.add(newValue)
newValue
}
}
  /** Returns the first element of type `A` matching `predicate`, creating and appending one via `create` if none matches. */
  def findTyped[A <: T : ClassTag](@ReturnOnNull("predicate") predicate: A => Boolean, @PredicateToFactory("predicate") create: () => A = YajomCollection.defaultFactory): A = {
val it = source.iterator()
while (it.hasNext) {
val next = it.next()
next match {
case typedNext: A =>
if (predicate(typedNext))
return typedNext
case _ =>
}
}
val elem: A = create()
source.add(elem)
elem
}
  /** Returns the first element matching `predicate`, creating and appending one via the implicit `create` factory if none matches. */
  def find(@ReturnOnNull("predicate") predicate: T => Boolean)(implicit @PredicateToFactory("predicate") create: () => T = YajomCollection.defaultFactory): T = {
val it = source.iterator()
while (it.hasNext) {
val next = it.next()
if (predicate(next))
return next
}
val elem: T = create()
source.add(elem)
elem
}
}
object YajomCollection {
val defaultFactory: () => Nothing = () => { throw new IllegalStateException("Should be replaced by YAJOM") }
}
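// Usage sketch (added for illustration; Person is a hypothetical mutable bean):
private object YajomCollectionSketch {
  class Person { var name: String = _ }

  // Returns the first Person named "Bob", creating and appending one if absent.
  def firstBob(people: java.util.ArrayList[Person]): Person =
    new YajomCollection[Person](people).find(_.name == "Bob")(() => {
      val p = new Person
      p.name = "Bob"
      p
    })
}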
| gark87/yajom | yajom-macros/src/main/scala/org/gark87/yajom/base/YajomCollection.scala | Scala | mit | 1,440 |
package synereo.client.handlers
import diode.{ActionHandler, ActionResult, ModelRW}
import synereo.client.logger
import synereo.client.rootmodels.AppRootModel
/**
* Created by bhagyashree.b on 2016-07-28.
*/
case class ShowServerError(getError: String)
case class ToggleImageUploadModal()
case class ToggleAboutInfoModal()
case class ToggleNodeSettingModal()
case class ToggleNewMessageModal()
case class CloseAllPopUp()
case class SetPreventNavigation()
case class UnsetPreventNavigation()
class AppHandler[M](modelRW: ModelRW[M, AppRootModel]) extends ActionHandler(modelRW) {
override def handle: PartialFunction[Any, ActionResult[M]] = {
case ShowServerError(errorMsg) =>
updated(value.copy(isServerError = true, serverErrorMsg = errorMsg))
case ToggleImageUploadModal() =>
// logger.log.debug(s"ToggleImageUploadModal in handler ${value.showProfileImageUploadModal}")
updated(value.copy(showProfileImageUploadModal = !value.showProfileImageUploadModal))
case ToggleNewMessageModal() =>
// logger.log.debug(s"ToggleNewMessageModal in handler ${value.showProfileImageUploadModal}")
updated(value.copy(showNewMessageModal = !value.showNewMessageModal))
case ToggleAboutInfoModal() =>
// logger.log.debug(s"ToggleAboutInfoModal in handler ${value.showAboutInfoModal}")
updated(value.copy(showAboutInfoModal = !value.showAboutInfoModal))
case ToggleNodeSettingModal() =>
// logger.log.debug(s"ToggleNodeSettingModal in handler ${value.showNodeSettingModal}")
updated(value.copy(showNodeSettingModal = !value.showNodeSettingModal))
case CloseAllPopUp() =>
// logger.log.debug(s"closing all popup in app module")
updated(value.copy(showNodeSettingModal = false, showAboutInfoModal = false, showProfileImageUploadModal = false, showNewMessageModal = false))
case SetPreventNavigation() =>
updated(value.copy(preventNavigation = true))
case UnsetPreventNavigation() =>
updated(value.copy(preventNavigation = false))
}
} | LivelyGig/ProductWebUI | sclient/src/main/scala/synereo/client/Handlers/AppHandler.scala | Scala | apache-2.0 | 2,143 |
package com.coiney.play.json.extras
import org.joda.time.DateTime
import org.scalatest.{Matchers, WordSpecLike}
import play.api.libs.json._
class JodaTimeSpec extends WordSpecLike
with Matchers {
import JodaTime._
val testMap: Map[Long, String] = Map(
397100000000L -> "1982-08-02T01:33:20.000Z",
1083360000000L -> "2004-04-30T21:20:00.000Z",
466700000000L -> "1984-10-15T14:53:20.000Z",
159780000000L -> "1975-01-24T07:20:00.000Z",
1480840000000L -> "2016-12-04T08:26:40.000Z",
1455280000000L -> "2016-02-12T12:26:40.000Z",
1341860000000L -> "2012-07-09T18:53:20.000Z",
1460760000000L -> "2016-04-15T22:40:00.000Z",
584780000000L -> "1988-07-13T06:53:20.000Z",
131400000000L -> "1974-03-01T20:00:00.000Z"
)
"JodaTime" should {
"deserialise ISO 8601-compliant strings into Joda DateTime Objects" in {
testMap.foreach { case (millis, isoString) =>
Json.fromJson[DateTime](JsString(isoString)).map(_.getMillis) should be(JsSuccess(new DateTime(millis)).map(_.getMillis))
}
}
"serialise Joda DateTime objects into ISO 8601-compliant string representations" in {
testMap.foreach { case (millis, isoString) =>
Json.toJson(new DateTime(millis)) should be (JsString(isoString))
}
}
"fail gracefully for non ISO 8601-compliant strings" in {
assert(Json.fromJson[DateTime](JsString("faulty string")).isInstanceOf[JsError])
assert(Json.fromJson[DateTime](JsString("2016-12-0408:26:40.000Z")).isInstanceOf[JsError])
assert(Json.fromJson[DateTime](JsString("2016-12-04T08:2640.000Z")).isInstanceOf[JsError])
assert(Json.fromJson[DateTime](JsString("2016-1204T08:26:40.000Z")).isInstanceOf[JsError])
assert(Json.fromJson[DateTime](JsString("2016-12-04T0826:40.000Z")).isInstanceOf[JsError])
}
"fail gracefully for non strings" in {
assert(Json.fromJson[DateTime](JsNumber(100)).isInstanceOf[JsError])
assert(Json.fromJson[DateTime](JsArray(JsString("1988-07-13T06:53:20.000Z") :: JsString("1988-07-13T06:53:20.000Z") :: Nil)).isInstanceOf[JsError])
}
}
}
| Coiney/play-json-extras | src/test/scala/com/coiney/play/json/extras/JodaTimeSpec.scala | Scala | bsd-3-clause | 2,136 |
package fpinscala.datastructures
import org.scalatest.prop.TableDrivenPropertyChecks._
import org.scalatest.{FeatureSpec, Matchers}
/**
* Created by michal on 2/15/15.
*/
class TreeTest extends FeatureSpec with Matchers {
feature ("The size function"){
import fpinscala.datastructures.Tree._
val dataset =
Table(
("tree" ,"size" ),
(Leaf(1) , 1 ),
(Branch(Leaf(1), Leaf(2)) , 3 ),
(Branch(Leaf(1), Branch(Leaf(1), Leaf(2))) , 5 )
)
scenario ("size should return the size of the tree"){
forAll (dataset) { (tree: Tree[Int], size: Int) =>
Tree.size(tree) shouldEqual size
}
}
scenario ("sizeViaFold should return the size of the tree"){
forAll (dataset) { (tree: Tree[Int], size: Int) =>
Tree.sizeViaFold(tree) shouldEqual size
}
}
}
feature ("The maximum function"){
import fpinscala.datastructures.Tree._
val dataset =
Table(
("tree" ,"max" ),
(Leaf(1) , 1 ),
(Branch(Leaf(1), Leaf(2)) , 2 ),
(Branch(Leaf(1), Branch(Leaf(1), Leaf(4))) , 4 )
)
scenario ("max should return the max value of tree"){
forAll (dataset) { (tree: Tree[Int], max: Int) =>
Tree.maximum(tree) shouldEqual max
}
}
scenario ("maxViaFold should return the max value of tree"){
forAll (dataset) { (tree: Tree[Int], max: Int) =>
Tree.maximumViaFold(tree) shouldEqual max
}
}
}
feature ("The depth function"){
import fpinscala.datastructures.Tree._
val dataset =
Table(
("tree" ,"depth" ),
(Leaf(1) , 1 ),
(Branch(Leaf(1), Leaf(1)) , 2 ),
(Branch(Leaf(1), Branch(Leaf(1), Branch(Leaf(1), Branch(Leaf(3), Leaf(2))))) , 5 )
)
scenario ("depth should return the max depth of tree"){
forAll (dataset) { (tree: Tree[Int], max: Int) =>
Tree.depth(tree) shouldEqual max
}
}
scenario ("depthViaFold should return the max depth of tree"){
forAll (dataset) { (tree: Tree[Int], max: Int) =>
Tree.depthViaFold(tree) shouldEqual max
}
}
}
feature ("The map function"){
import fpinscala.datastructures.Tree._
def f = (i: Int) => 2 * i
val dataset =
Table(
("tree", "f", "out" ),
(Leaf(1), f, Leaf(2) ),
(Branch(Leaf(2), Leaf(2)), f, Branch(Leaf(4), Leaf(4)) )
)
scenario ("map should return the mapped f to each leaf"){
forAll (dataset) { (in: Tree[Int], f: (Int)=> Int, out: Tree[Int]) =>
Tree.map(in)(f) shouldEqual out
}
}
scenario ("mapViaFold should return the mapped f to each leaf"){
forAll (dataset) { (in: Tree[Int], f: (Int)=> Int, out: Tree[Int]) =>
Tree.mapViaFold(in)(f) shouldEqual out
}
}
}
}
| mkunikow/fpinscala | exercises/src/test/scala/fpinscala/datastructures/TreeTest.scala | Scala | mit | 3,316 |
package com.dt.scala.oop
/**
* Author: Wang Jialin
* Contact Information:
* WeChat: 18610086859
* QQ: 1740415547
* Email: [email protected]
* Tel: 18610086859
*/
class AbstractClassOps{
var id : Int = _
}
abstract class SuperTeacher(val name : String){
var id : Int
var age : Int
def teach
}
class TeacherForMaths(name : String) extends SuperTeacher(name){
override var id = name.hashCode()
override var age = 29
override def teach{
println("Teaching!!!")
}
}
object AbstractClassOps{
def main(args: Array[String]) {
val teacher = new TeacherForMaths("Spark")
teacher.teach
println("teacher.id" + ":" + teacher.id)
println(teacher.name + ":" + teacher.age)
}
} | slieer/scala-tutorials | src/main/scala/com/dt/scala/oop/AbstractClassOps.scala | Scala | apache-2.0 | 764 |
/* XStreamUtils.scala
Copyright 2011 Tommy Skodje (http://www.antares.no)
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package no.antares.xstream
import com.thoughtworks.xstream.XStream
import com.thoughtworks.xstream.io.xml.DomDriver
import com.thoughtworks.xstream.io.json.JettisonMappedXmlDriver
import collection.mutable.ListBuffer
import com.thoughtworks.xstream.io.HierarchicalStreamDriver
/** Simple wrapper for commonly used XStream functions */
class XStreamUtils(
val classAliases: ListBuffer[ ClassAlias ],
val fieldAliases: ListBuffer[ FieldAlias ]
) {
def this() = this( new ListBuffer[ ClassAlias ], new ListBuffer[ FieldAlias ] )
def fromJson( s: String ) : Object = toObject( s, new JettisonMappedXmlDriver() );
def fromXml( s: String ) : Object = toObject( s, new DomDriver() );
def alias( name: String, clazz: Class[_] ): XStreamUtils = {
classAliases.append( new ClassAlias( name, clazz ) )
this
}
def aliasField( clazz: Class[_], attributeName: String, alias: String ): XStreamUtils = {
fieldAliases.append( new FieldAlias( clazz, attributeName, alias ) )
this
}
private def toObject( s: String, hierarchicalStreamDriver: HierarchicalStreamDriver ) : Object = {
val xstream = new XStream( hierarchicalStreamDriver );
classAliases.foreach( alias => xstream.alias( alias.xmlName, alias.clazz ) )
fieldAliases.foreach( alias => xstream.aliasAttribute( alias.clazz, alias.attributeName, alias.alias ) )
return xstream.fromXML( s );
}
}
class ClassAlias(val xmlName: String, val clazz: Class[_])
class FieldAlias(val clazz: Class[_], val attributeName: String, val alias: String)
object XStreamUtils {
def toJson( o: Object ): String = {
val xstream = new XStream( new JettisonMappedXmlDriver() );
return xstream.toXML( o );
}
def toXml( o: Object ): String = {
var xstream = new XStream( new DomDriver() );
return xstream.toXML( o );
}
def fromJson( s: String ): Object = ( new XStreamUtils() ).fromJson( s );
def fromXml( s: String ): Object = ( new XStreamUtils() ).fromXml( s );
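	/* Usage sketch (added for illustration; Point is a hypothetical bean, not
	   part of this library): serialize with the helpers above, then parse back. */
	private object XStreamUtilsSketch {
		class Point( val x: Int, val y: Int )

		def roundTrip(): Object = {
			val xml = toXml( new Point( 1, 2 ) ) // default, fully-qualified element names
			fromXml( xml ) // parses back into a Point
		}
	}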
} | 2my/test-data-control | src/main/scala/no/antares/xstream/XStreamUtils.scala | Scala | apache-2.0 | 2,620 |
/* cowjac © 2012 David Given
* This file is licensed under the Simplified BSD license. Please see
* COPYING.cowjac for the full text.
*/
package com.cowlark.cowjac
import scala.collection.JavaConversions.asScalaBuffer
import scala.collection.JavaConversions.collectionAsScalaIterable
import scala.collection.immutable.HashMap
import soot.jimple.toolkits.annotation.tags.NullCheckTag
import soot.jimple.AbstractJimpleValueSwitch
import soot.jimple.AbstractStmtSwitch
import soot.jimple.AddExpr
import soot.jimple.AndExpr
import soot.jimple.ArrayRef
import soot.jimple.AssignStmt
import soot.jimple.BinopExpr
import soot.jimple.CastExpr
import soot.jimple.CaughtExceptionRef
import soot.jimple.ClassConstant
import soot.jimple.CmpExpr
import soot.jimple.CmpgExpr
import soot.jimple.CmplExpr
import soot.jimple.DefinitionStmt
import soot.jimple.DivExpr
import soot.jimple.DoubleConstant
import soot.jimple.EnterMonitorStmt
import soot.jimple.EqExpr
import soot.jimple.ExitMonitorStmt
import soot.jimple.FieldRef
import soot.jimple.FloatConstant
import soot.jimple.GeExpr
import soot.jimple.GotoStmt
import soot.jimple.GtExpr
import soot.jimple.IdentityStmt
import soot.jimple.IfStmt
import soot.jimple.InstanceFieldRef
import soot.jimple.InstanceInvokeExpr
import soot.jimple.InstanceOfExpr
import soot.jimple.IntConstant
import soot.jimple.InterfaceInvokeExpr
import soot.jimple.InvokeExpr
import soot.jimple.InvokeStmt
import soot.jimple.LeExpr
import soot.jimple.LengthExpr
import soot.jimple.LongConstant
import soot.jimple.LtExpr
import soot.jimple.MulExpr
import soot.jimple.NeExpr
import soot.jimple.NegExpr
import soot.jimple.NewArrayExpr
import soot.jimple.NewExpr
import soot.jimple.NullConstant
import soot.jimple.OrExpr
import soot.jimple.ParameterRef
import soot.jimple.RemExpr
import soot.jimple.ReturnStmt
import soot.jimple.ReturnVoidStmt
import soot.jimple.ShlExpr
import soot.jimple.ShrExpr
import soot.jimple.SpecialInvokeExpr
import soot.jimple.StaticFieldRef
import soot.jimple.StaticInvokeExpr
import soot.jimple.StringConstant
import soot.jimple.SubExpr
import soot.jimple.ThisRef
import soot.jimple.ThrowStmt
import soot.jimple.UnopExpr
import soot.jimple.UshrExpr
import soot.jimple.VirtualInvokeExpr
import soot.jimple.XorExpr
import soot.tagkit.AnnotationStringElem
import soot.tagkit.VisibilityAnnotationTag
import soot.toolkits.graph.BriefUnitGraph
import soot.ArrayType
import soot.BooleanType
import soot.ByteType
import soot.CharType
import soot.ClassMember
import soot.DoubleType
import soot.FloatType
import soot.IntType
import soot.Local
import soot.LongType
import soot.PrimType
import soot.RefLikeType
import soot.RefType
import soot.ShortType
import soot.SootClass
import soot.SootField
import soot.SootFieldRef
import soot.SootMethod
import soot.SootMethodRef
import soot.Type
import soot.TypeSwitch
import soot.VoidType
import soot.AbstractJasminClass
import soot.tagkit.Host
import soot.Scene
import soot.jimple.TableSwitchStmt
import soot.NullType
import soot.jimple.LookupSwitchStmt
object Translator extends Object with SootExtensions with Utils
{
private var namecache = HashMap[String, String]()
private val splitpoints = Array('.', '/')
private def reformName(jname: String, separator: String): String =
jname.split(splitpoints).reduceLeft(_ + separator + _)
private val javaToCXX = Memoize((s: String) => "::" + reformName(s, "::"))
private def className(s: String) =
javaToCXX(s)
private def className(c: SootClass): String =
className(c.getName)
private def getNativeTag(m: Host): String =
{
for (tag <- m.getTags if tag.getName == "VisibilityAnnotationTag")
{
val vat = tag.asInstanceOf[VisibilityAnnotationTag]
for (a <- vat.getAnnotations if a.getType == "Lcom/cowlark/cowjac/harmony/Native;")
{
val s = a.getElemAt(0).asInstanceOf[AnnotationStringElem]
return s.getValue
}
}
return null
}
private def getNativeMethodName(sootclass: SootClass, signature: String): String =
{
if (!sootclass.declaresMethod(signature))
return null
return getNativeTag(sootclass.getMethod(signature))
}
private def getNativeFieldName(sootclass: SootClass, signature: String): String =
{
if (!sootclass.declaresField(signature))
return null
return getNativeTag(sootclass.getField(signature))
}
private def getRecursiveNativeMethodName(sootclass: SootClass, signature: String): String =
{
var c = sootclass
while (true)
{
var n = getNativeMethodName(c, signature)
if (n != null)
return n
if (!c.hasSuperclass)
return null
c = c.getSuperclass
}
return null /* oddly necessary */
}
private def getRecursiveNativeFieldName(sootclass: SootClass, signature: String): String =
{
var c = sootclass
while (true)
{
var n = getNativeFieldName(c, signature)
if (n != null)
return n
if (!c.hasSuperclass)
return null
c = c.getSuperclass
}
return null /* oddly necessary */
}
private def methodNameImpl(m: SootMethod): String =
{
/* If the method has a specific native name, use that. */
var nativename = getRecursiveNativeMethodName(m.getDeclaringClass,
m.getSubSignature)
if (nativename != null)
return nativename
/* Otherwise, mangle the name. */
def hex2(i: Integer) =
(if (i < 16) "0" else "") + Integer.toHexString(i)
val sb = new StringBuilder("m_")
val namewithsignature = m.getName + "_" +
AbstractJasminClass.jasminDescriptorOf(m.makeRef)
for (c <- namewithsignature)
{
if (c.isLetterOrDigit)
sb += c
else
{
sb += '_'
sb ++= hex2(c.toInt)
}
}
return sb.toString
}
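	/* Worked example (added for illustration): for a method `int f(String s)`,
	 * namewithsignature is "f_(Ljava/lang/String;)I", which the loop above
	 * mangles to "m_f_5f_28Ljava_2flang_2fString_3b_29I"
	 * ('_' = 0x5f, '(' = 0x28, '/' = 0x2f, ';' = 0x3b, ')' = 0x29). */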
private val methodName = Memoize(methodNameImpl)
private def methodName(m: SootMethodRef): String =
methodName(m.resolve)
private def fieldNameImpl(m: SootField): String =
{
var nativename = getRecursiveNativeFieldName(m.getDeclaringClass,
m.getSubSignature)
if (nativename != null)
return nativename
def hex2(i: Integer) =
(if (i < 16) "0" else "") + Integer.toHexString(i)
val sb = new StringBuilder("f_")
val name = m.getName
for (c <- name)
{
if (c.isLetterOrDigit)
sb += c
else
{
sb += '_'
sb ++= hex2(c.toInt)
}
}
return sb.toString
}
private val fieldName = Memoize(fieldNameImpl)
private def fieldName(m: SootFieldRef): String =
fieldName(m.resolve)
private def translateModifier(cm: ClassMember, p: Printer)
{
if (cm.isPrivate)
p.print("private: ")
else
p.print("public: ")
if (cm.isStatic)
p.print("static ")
}
private def translateType(t: Type, p: Printer)
{
object TS extends TypeSwitch
{
override def caseVoidType(t: VoidType) = p.print("void")
override def caseBooleanType(t: BooleanType) = p.print("jboolean")
override def caseByteType(t: ByteType) = p.print("jbyte")
override def caseCharType(t: CharType) = p.print("jchar")
override def caseShortType(t: ShortType) = p.print("jshort")
override def caseIntType(t: IntType) = p.print("jint")
override def caseLongType(t: LongType) = p.print("jlong")
override def caseFloatType(t: FloatType) = p.print("jfloat")
override def caseDoubleType(t: DoubleType) = p.print("jdouble")
override def caseNullType(t: NullType) = p.print("::java::lang::Object*")
override def caseArrayType(t: ArrayType)
{
if (t.getElementType.isInstanceOf[RefLikeType])
p.print("::com::cowlark::cowjac::ObjectArray*")
else
{
p.print("::com::cowlark::cowjac::ScalarArray< ")
t.getElementType.apply(TS)
p.print(" >*")
}
}
override def caseRefType(t: RefType)
{
p.print(className(t.getSootClass), "*")
}
override def defaultCase(t: Type) = assert(false)
}
t.apply(TS)
}
private def classConstant(t: Type): String =
{
var result: String = null;
object TS extends TypeSwitch
{
override def caseBooleanType(t: BooleanType) = result = "::com::cowlark::cowjac::PrimitiveBooleanClassConstant"
override def caseByteType(t: ByteType) = result = "::com::cowlark::cowjac::PrimitiveByteClassConstant"
override def caseCharType(t: CharType) = result = "::com::cowlark::cowjac::PrimitiveCharClassConstant"
override def caseShortType(t: ShortType) = result = "::com::cowlark::cowjac::PrimitiveShortClassConstant"
override def caseIntType(t: IntType) = result = "::com::cowlark::cowjac::PrimitiveIntClassConstant"
override def caseLongType(t: LongType) = result = "::com::cowlark::cowjac::PrimitiveLongClassConstant"
override def caseFloatType(t: FloatType) = result = "::com::cowlark::cowjac::PrimitiveFloatClassConstant"
override def caseDoubleType(t: DoubleType) = result = "::com::cowlark::cowjac::PrimitiveDoubleClassConstant"
override def caseArrayType(t: ArrayType)
{
t.getArrayElementType().apply(TS)
result += "->getArrayType(&F)"
}
override def caseRefType(t: RefType) =
result = className(t.getSootClass) + "::getClassConstant(&F)"
override def defaultCase(t: Type) = assert(false)
}
t.apply(TS)
return result
}
def translate(sootclass: SootClass, ps: PrintSet)
{
var stringconstants = Map.empty[String, Integer]
def translateFieldDeclaration(field: SootField)
{
val isref = field.getType.isInstanceOf[RefLikeType]
ps.h.print("\\t")
translateModifier(field, ps.h)
translateType(field.getType, ps.h)
ps.h.print(" (", fieldName(field), ");\\n")
}
def translateFieldDefinition(field: SootField)
{
if (field.isStatic)
{
val isref = field.getType.isInstanceOf[RefLikeType]
translateType(field.getType, ps.ch)
ps.ch.print(" (", className(field.getDeclaringClass), "::",
fieldName(field), ");\\n")
}
}
def translateMethodDeclaration(method: SootMethod)
{
var pure = isMethodPure(method)
/* If we're *not* pure, and this method has no implementation,
* then there's no need to actually declare the method here, as
* it's already declared; and it will confuse C++.
*/
if (!pure && (method.isAbstract || sootclass.isInterface))
return
/* Ordinary method. */
ps.h.print("\\t")
translateModifier(method, ps.h)
if (!method.isPrivate && !method.isStatic)
ps.h.print("virtual ")
translateType(method.getReturnType, ps.h)
ps.h.print(" ", methodName(method))
ps.h.print("(com::cowlark::cowjac::Stackframe*")
for (to <- method.getParameterTypes)
{
val t = to.asInstanceOf[Type]
ps.h.print(", ")
translateType(t, ps.h)
}
ps.h.print(")")
if (pure)
ps.h.print(" = 0")
ps.h.print(";\\n")
}
def translateMethodDefinition(method: SootMethod)
{
val body = method.getActiveBody
ps.c.print("\\n/* ", method.getSubSignature, " */\\n")
translateType(method.getReturnType, ps.c)
ps.c.print(" (", className(method.getDeclaringClass), "::",
methodName(method),
")(com::cowlark::cowjac::Stackframe* parentFrame")
for (i <- 0 until method.getParameterCount)
{
val t = method.getParameterType(i)
ps.c.print(", ")
translateType(t, ps.c)
ps.c.print(" p", String.valueOf(i))
}
ps.c.print(")\\n{\\n")
/* If this is a static method, ensure the class has been
* initialised. */
if (method.isStatic)
ps.c.print("\\t", className(sootclass), "::classInit(parentFrame);\\n")
/* Declare stackframe structure. */
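		/* Reference-typed locals get shadow slots (f<name>) in the frame
		 * so that the garbage collector can find and mark them. */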
ps.c.print("\\tstruct frame : public com::cowlark::cowjac::Stackframe\\n")
ps.c.print("\\t{\\n");
ps.c.print("\\t\\tframe(com::cowlark::cowjac::Stackframe* p):\\n")
ps.c.print("\\t\\t\\tcom::cowlark::cowjac::Stackframe(p)\\n")
val reflike = body.getLocals.filter(s => s.getType.isInstanceOf[RefLikeType])
ps.c.print("\\t\\t{\\n")
if (!reflike.isEmpty)
{
ps.c.print("\\t\\t\\tmemset(&f",
					reflike.head.getName,
					", 0, sizeof(f",
					reflike.head.getName,
") * ",
String.valueOf(reflike.size),
");\\n")
}
ps.c.print("\\t\\t}\\n")
ps.c.print("\\n")
if (!reflike.isEmpty)
{
ps.c.print("\\t\\tvoid mark()\\n")
ps.c.print("\\t\\t{\\n")
ps.c.print("\\t\\t\\tmarkMany(&f",
					reflike.head.getName, ", ",
String.valueOf(reflike.size),
");\\n")
ps.c.print("\\t\\t}\\n")
}
ps.c.print("\\n")
ps.c.print("public:\\n")
for (local <- reflike)
{
val t = local.getType
ps.c.print("\\t\\t::com::cowlark::cowjac::ContainsReferences* ",
"f", local.getName, ";\\n")
}
ps.c.print("\\t};\\n");
ps.c.print("\\tframe F(parentFrame);\\n")
ps.c.print("\\t::com::cowlark::cowjac::Object* caughtexception;\\n")
ps.c.print("\\n")
/* Declare locals. */
for (local <- body.getLocals)
{
val t = local.getType
ps.c.print("\\t/* Real type: ", t.toString, " */\\n")
ps.c.print("\\t")
translateType(Type.toMachineType(t), ps.c)
ps.c.print(" j", local.getName, " = 0;\\n")
}
/* The method body itself. */
var labels = HashMap.empty[soot.Unit, Integer]
val ug = new BriefUnitGraph(body)
var notnull = false
			def label(unit: soot.Unit): String =
				labels.get(unit) match {
					case Some(i) => "L" + i
					case None =>
						val i = labels.size
						labels += (unit -> i)
						"L" + i
				}
object NS extends TypeSwitch
{
override def caseRefType(t: RefType) =
ps.c.print(javaToCXX(t.getSootClass.getName))
override def defaultCase(t: Type) = assert(false)
}
object VS extends AbstractJimpleValueSwitch
{
override def caseIntConstant(s: IntConstant) =
ps.c.print("(jint)0x", s.value.toHexString)
override def caseLongConstant(s: LongConstant) =
ps.c.print("(jlong)0x", s.value.toHexString, "LL")
override def caseFloatConstant(s: FloatConstant) =
{
val value = s.value
if (value.isNegInfinity)
ps.c.print("-INFINITY")
else if (value.isPosInfinity)
ps.c.print("INFINITY")
else if (value.isNaN)
ps.c.print("NAN")
else
ps.c.print(s.value.toString, "f")
}
override def caseDoubleConstant(s: DoubleConstant) =
{
val value = s.value
if (value.isNegInfinity)
ps.c.print("-INFINITY")
else if (value.isPosInfinity)
ps.c.print("INFINITY")
else if (value.isNaN)
ps.c.print("NAN")
else
ps.c.print(s.value.toString)
}
override def caseStringConstant(s: StringConstant) =
{
				val cid = stringconstants.get(s.value) match {
					case Some(n) => n
					case None =>
						val n = stringconstants.size
						stringconstants += (s.value -> n)
						n
				}
ps.c.print("sc", String.valueOf(cid))
}
override def caseNullConstant(s: NullConstant) =
ps.c.print("0")
override def caseClassConstant(s: ClassConstant) =
{
/* s.value is a path-style classname, with / separators.
* We want one with . instead. */
val name = s.value.replace('/', '.')
val sc = Scene.v.getSootClass(name)
ps.c.print(classConstant(sc.getType))
}
override def caseThisRef(v: ThisRef) =
ps.c.print("this")
override def caseLocal(v: Local) =
ps.c.print("j", v.getName)
override def caseInstanceFieldRef(v: InstanceFieldRef) =
{
v.getBase.apply(VS)
ps.c.print("->", className(v.getFieldRef.declaringClass),
"::", fieldName(v.getFieldRef))
}
override def caseStaticFieldRef(v: StaticFieldRef) =
if (v.getFieldRef.declaringClass == sootclass)
ps.c.print(fieldName(v.getFieldRef))
else
ps.c.print("(", className(v.getFieldRef.declaringClass), "::classInit(&F), ",
className(v.getFieldRef.declaringClass), "::", fieldName(v.getFieldRef),
")")
override def caseArrayRef(v: ArrayRef) =
{
val t = v.getType
var needscast = t.isInstanceOf[RefLikeType]
if (needscast)
{
ps.c.print("dynamic_cast< ")
translateType(v.getType, ps.c)
ps.c.print(" >(")
}
if (!notnull)
ps.c.print("::com::cowlark::cowjac::NullCheck(")
v.getBase.apply(VS)
if (!notnull)
ps.c.print(")")
ps.c.print("->get(&F, ")
v.getIndex.apply(VS)
ps.c.print(")")
if (needscast)
ps.c.print(")")
}
override def caseLengthExpr(v: LengthExpr) =
{
if (!notnull)
ps.c.print("::com::cowlark::cowjac::NullCheck(")
v.getOp.apply(VS)
if (!notnull)
ps.c.print(")")
ps.c.print("->length()")
}
override def caseParameterRef(v: ParameterRef) =
ps.c.print("p", String.valueOf(v.getIndex))
override def caseCastExpr(v: CastExpr) =
{
if (v.getCastType.isInstanceOf[PrimType])
{
ps.c.print("(")
translateType(v.getCastType, ps.c)
ps.c.print(")(")
v.getOp.apply(VS)
ps.c.print(")")
}
else
{
ps.c.print("::com::cowlark::cowjac::Cast< ")
translateType(v.getOp.getType, ps.c)
ps.c.print(", ")
translateType(v.getCastType, ps.c)
ps.c.print(" >(&F, ")
v.getOp.apply(VS)
ps.c.print(")")
}
}
override def caseInstanceOfExpr(v: InstanceOfExpr) =
{
ps.c.print("!!dynamic_cast< ")
translateType(v.getCheckType, ps.c)
ps.c.print(" >(")
v.getOp.apply(VS)
ps.c.print(")")
}
override def caseAddExpr(v: AddExpr) = caseBinopExpr(v)
override def caseSubExpr(v: SubExpr) = caseBinopExpr(v)
override def caseMulExpr(v: MulExpr) = caseBinopExpr(v)
override def caseDivExpr(v: DivExpr) = caseBinopExpr(v)
override def caseRemExpr(v: RemExpr) = caseBinopExpr(v)
override def caseShlExpr(v: ShlExpr) = caseBinopExpr(v)
override def caseShrExpr(v: ShrExpr) = caseBinopExpr(v)
override def caseUshrExpr(v: UshrExpr) = caseBinopXExpr(v, "Ushr")
override def caseGeExpr(v: GeExpr) = caseBinopExpr(v)
override def caseGtExpr(v: GtExpr) = caseBinopExpr(v)
override def caseLeExpr(v: LeExpr) = caseBinopExpr(v)
override def caseLtExpr(v: LtExpr) = caseBinopExpr(v)
override def caseEqExpr(v: EqExpr) = caseBinopExpr(v)
override def caseNeExpr(v: NeExpr) = caseBinopExpr(v)
override def caseCmpExpr(v: CmpExpr) = caseBinopXExpr(v, "Cmp")
override def caseCmpgExpr(v: CmpgExpr) = caseBinopXExpr(v, "Cmpg")
override def caseCmplExpr(v: CmplExpr) = caseBinopXExpr(v, "Cmpl")
override def caseAndExpr(v: AndExpr) = caseBinopExpr(v)
override def caseOrExpr(v: OrExpr) = caseBinopExpr(v)
override def caseXorExpr(v: XorExpr) = caseBinopExpr(v)
private def caseBinopExpr(v: BinopExpr) =
{
v.getOp1.apply(VS)
ps.c.print(v.getSymbol)
v.getOp2.apply(VS)
}
private def caseBinopXExpr(v: BinopExpr, x: String) =
{
ps.c.print("::com::cowlark::cowjac::", x, "(")
v.getOp1.apply(VS)
ps.c.print(", ")
v.getOp2.apply(VS)
ps.c.print(")")
}
override def caseNegExpr(v: NegExpr) =
{
ps.c.print("-")
v.getOp.apply(VS)
}
override def caseNewExpr(v: NewExpr) =
{
val t = v.getType
if (t.isInstanceOf[RefType])
{
val rt = t.asInstanceOf[RefType]
ps.c.print("(", className(rt.getSootClass), "::classInit(&F), ")
ps.c.print("new ")
v.getType.apply(NS)
ps.c.print(")")
}
				else
				{
					ps.c.print("new ")
					v.getType.apply(NS)
				}
}
override def caseNewArrayExpr(v: NewArrayExpr) =
{
val t = v.getBaseType
ps.c.print("new ")
if (t.isInstanceOf[RefLikeType])
ps.c.print("::com::cowlark::cowjac::ObjectArray")
else
{
ps.c.print("::com::cowlark::cowjac::ScalarArray< ")
translateType(t, ps.c)
ps.c.print(" >")
}
ps.c.print("(&F, ", classConstant(t), "->getArrayType(&F), ")
v.getSize.apply(VS)
ps.c.print(")")
}
private def parameters(v: InvokeExpr)
{
ps.c.print("(&F")
for (arg <- v.getArgs)
{
ps.c.print(", ")
arg.apply(VS)
}
ps.c.print(")")
}
override def caseInterfaceInvokeExpr(v: InterfaceInvokeExpr) =
caseInstanceInvokeExpr(v)
override def caseVirtualInvokeExpr(v: VirtualInvokeExpr) =
caseInstanceInvokeExpr(v)
def caseInstanceInvokeExpr(v: InstanceInvokeExpr) =
{
if (!notnull)
ps.c.print("com::cowlark::cowjac::NullCheck(")
v.getBase.apply(VS)
if (!notnull)
ps.c.print(")")
ps.c.print("->", methodName(v.getMethodRef))
parameters(v)
}
override def caseSpecialInvokeExpr(v: SpecialInvokeExpr) =
{
if (!notnull)
ps.c.print("com::cowlark::cowjac::NullCheck(")
v.getBase.apply(VS)
if (!notnull)
ps.c.print(")")
ps.c.print("->", className(v.getMethodRef.declaringClass),
"::", methodName(v.getMethodRef))
parameters(v)
}
override def caseStaticInvokeExpr(v: StaticInvokeExpr) =
{
ps.c.print(className(v.getMethodRef.declaringClass), "::",
methodName(v.getMethodRef))
parameters(v)
}
override def defaultCase(s: Any) = assert(false)
}
object SS extends AbstractStmtSwitch
{
override def caseIdentityStmt(s: IdentityStmt) = caseDefinitionStmt(s)
override def caseAssignStmt(s: AssignStmt) = caseDefinitionStmt(s)
override def caseReturnStmt(s: ReturnStmt) =
{
ps.c.print("\\treturn ")
s.getOp.apply(VS)
ps.c.print(";\\n")
}
override def caseReturnVoidStmt(s: ReturnVoidStmt) =
ps.c.print("\\treturn;\\n")
override def caseIfStmt(s: IfStmt) =
{
ps.c.print("\\tif (")
s.getCondition.apply(VS)
ps.c.print(") goto ", label(s.getTarget), ";\\n")
}
override def caseInvokeStmt(s: InvokeStmt) =
{
ps.c.print("\\t")
s.getInvokeExpr.apply(VS)
ps.c.print(";\\n")
}
private def assignment_source(s: DefinitionStmt)
{
if (s.getRightOp.isInstanceOf[CaughtExceptionRef])
{
ps.c.print("dynamic_cast< ")
translateType(s.getLeftOp.getType, ps.c)
ps.c.print(" >(caughtexception)")
}
else
s.getRightOp.apply(VS)
}
def caseDefinitionStmt(s: DefinitionStmt) =
{
ps.c.print("\\t")
if (s.getLeftOp.isInstanceOf[ArrayRef])
{
val target = s.getLeftOp.asInstanceOf[ArrayRef];
if (!notnull)
ps.c.print("::com::cowlark::cowjac::NullCheck(")
target.getBase.apply(VS)
if (!notnull)
ps.c.print(")")
ps.c.print("->set(&F, ")
target.getIndex.apply(VS)
ps.c.print(", ")
assignment_source(s)
ps.c.print(")")
}
else
{
if (s.getLeftOp.isInstanceOf[Local] &&
s.getLeftOp.getType.isInstanceOf[RefLikeType])
{
						/* Assign to a local which is a reference; must remember
* to update the stack frame to make GC work. */
val local = s.getLeftOp.asInstanceOf[Local]
ps.c.print("F.f", local.getName, " = ")
}
s.getLeftOp.apply(VS)
ps.c.print(" = ")
assignment_source(s)
}
ps.c.print(";\\n")
}
override def caseThrowStmt(s: ThrowStmt) =
{
ps.c.print("\\tthrow ")
s.getOp.apply(VS)
ps.c.print(";\\n")
}
override def caseGotoStmt(s: GotoStmt) =
ps.c.print("\\tgoto ", label(s.getTarget), ";\\n")
override def caseEnterMonitorStmt(s: EnterMonitorStmt) =
{
ps.c.print("\\t")
if (!notnull)
ps.c.print("com::cowlark::cowjac::NullCheck(")
s.getOp.apply(VS)
if (!notnull)
ps.c.print(")")
ps.c.print("->enterMonitor();\\n")
}
override def caseExitMonitorStmt(s: ExitMonitorStmt) =
{
ps.c.print("\\t")
if (!notnull)
ps.c.print("com::cowlark::cowjac::NullCheck(")
s.getOp.apply(VS)
if (!notnull)
ps.c.print(")")
ps.c.print("->leaveMonitor();\\n")
}
override def caseTableSwitchStmt(s: TableSwitchStmt) =
{
ps.c.print("\\tswitch (")
s.getKey.apply(VS)
ps.c.print(")\\n")
ps.c.print("\\t{\\n")
var value = s.getLowIndex
for (to <- s.getTargets)
{
ps.c.print("\\t\\tcase ",
value.toString, ": goto ",
label(to.asInstanceOf[soot.Unit]),
";\\n")
value += 1
}
ps.c.print("\\t\\tdefault: goto ",
label(s.getDefaultTarget), ";\\n")
ps.c.print("\\t}\\n")
}
override def caseLookupSwitchStmt(s: LookupSwitchStmt) =
{
ps.c.print("\\tswitch (")
s.getKey.apply(VS)
ps.c.print(")\\n")
ps.c.print("\\t{\\n")
for (i <- 0 until s.getTargetCount)
{
ps.c.print("\\t\\tcase ",
s.getLookupValue(i).toString, ": goto ",
label(s.getTarget(i)),
";\\n")
}
ps.c.print("\\t\\tdefault: goto ",
label(s.getDefaultTarget), ";\\n")
ps.c.print("\\t}\\n")
}
override def defaultCase(s: Any) = assert(false)
}
var oldunit: soot.Unit = null
for (unit <- body.getUnits)
{
/* If this is a target of a jump, then we need to add a label.
* An instruction is not a jump target if the only way to it is
* from the preceding instruction. */
			val preds = ug.getPredsOf(unit)
			val junction =
				if ((preds.size == 1) && (preds.get(0) == oldunit))
					oldunit.isInstanceOf[TableSwitchStmt] || oldunit.isInstanceOf[LookupSwitchStmt]
				else
					true
if (junction)
ps.c.print(label(unit), ":\\n")
val tag = unit.getTag("NullCheckTag").asInstanceOf[NullCheckTag]
notnull = (tag != null) && !tag.needCheck()
unit.apply(SS)
oldunit = unit
}
ps.c.print("}\\n\\n")
}
def forwardDeclare(sootclass: SootClass)
{
val nslevels = sootclass.getName.split('.')
for (i <- 0 to nslevels.length-2)
ps.h.print("namespace ", nslevels(i), " { ")
ps.h.print("class ", sootclass.getJavaStyleName, "; ")
for (i <- 0 to nslevels.length-2)
ps.h.print("}")
ps.h.print("\\n")
}
def emitTrampoline(frommethod: SootMethod, tomethod: SootMethod)
{
ps.h.print("\\n\\t/* (declared in ", className(frommethod.getDeclaringClass), ") */\\n")
ps.h.print("\\t")
translateModifier(frommethod, ps.h)
ps.h.print("virtual ")
translateType(frommethod.getReturnType, ps.h)
ps.h.print(" ", methodName(frommethod))
ps.h.print("(com::cowlark::cowjac::Stackframe* F")
for (i <- 0 until frommethod.getParameterCount)
{
val t = frommethod.getParameterType(i)
ps.h.print(", ")
translateType(t, ps.h)
ps.h.print(" p", String.valueOf(i))
}
ps.h.print(")\\n", "\\t{\\n", "\\t\\t")
if (!frommethod.getReturnType.isInstanceOf[VoidType])
ps.h.print("return ")
ps.h.print(className(tomethod.getDeclaringClass),
"::", methodName(tomethod), "(F")
for (i <- 0 until frommethod.getParameterCount)
{
val t = frommethod.getParameterType(i)
ps.h.print(", p", String.valueOf(i))
}
ps.h.print(");\\n", "\\t}\\n")
}
val jname = sootclass.getName()
val cxxname = javaToCXX(jname)
val headername = reformName(jname, "_").toUpperCase() + "_H"
ps.h.print("#ifndef ", headername, "\\n")
ps.h.print("#define ", headername, "\\n")
ps.ch.print("#include \\"cowjac.h\\"\\n")
ps.ch.print("#include \\"cowjacarray.h\\"\\n")
ps.ch.print("#include \\"cowjacclass.h\\"\\n")
ps.h.print("\\n")
val dependencies = getClassDependencies(sootclass)
for (d <- dependencies)
{
forwardDeclare(d)
ps.ch.print("#include \\"", mangleFilename(d.getName), ".h\\"\\n")
}
ps.h.print("\\n")
ps.ch.print("\\n")
ps.h.print("#include \\"java.lang.Object.h\\"\\n")
if (sootclass.hasSuperclass)
ps.h.print("#include \\"", mangleFilename(sootclass.getSuperclass.getName), ".h\\"\\n")
for (s <- sootclass.getInterfaces)
ps.h.print("#include \\"", mangleFilename(s.getName), ".h\\"\\n")
val nslevels = jname.split('.')
for (i <- 0 to nslevels.length-2)
ps.h.print("namespace ", nslevels(i), " {\\n")
ps.h.print("\\n")
/* Class declaration and superclasses. */
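		/* java.lang.Object and all interfaces are inherited virtually so
		 * that the C++ diamonds created by multiple interface inheritance
		 * collapse into a single Object subobject. */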
ps.h.print("class ", sootclass.getJavaStyleName, " : ")
if (sootclass.hasSuperclass)
{
val superclass = sootclass.getSuperclass
if ((superclass.getName == "java.lang.Object") || superclass.isInterface)
ps.h.print("virtual ")
ps.h.print("public ", className(superclass))
}
else
ps.h.print("public com::cowlark::cowjac::Object")
val parentinterfaces =
if (sootclass.hasSuperclass)
getAllInterfaces(sootclass.getSuperclass)
else
Set.empty[SootClass]
val newinterfaces = sootclass.getInterfaces.filterNot(
parentinterfaces.contains(_))
for (i <- newinterfaces)
ps.h.print(", virtual public ", className(i))
ps.h.print("\\n{\\n")
ps.h.print("\\t/* Class management */\\n")
ps.ch.print("/* Class management */\\n")
ps.h.print("\\tpublic: static ::java::lang::Class* getClassConstant(::com::cowlark::cowjac::Stackframe*);\\n")
ps.h.print("\\tpublic: static void classInit(::com::cowlark::cowjac::Stackframe*);\\n")
if (!sootclass.declaresMethod("java.lang.Class getClass()"))
ps.h.print("\\tpublic: virtual ::java::lang::Class* getClass(::com::cowlark::cowjac::Stackframe* F) { return getClassConstant(F); }\\n")
ps.h.print("\\n")
ps.h.print("\\t/* Field declarations */\\n")
ps.ch.print("\\n/* Field definitions */\\n")
for (f <- sootclass.getFields)
{
translateFieldDeclaration(f)
translateFieldDefinition(f)
}
if (!sootclass.isInterface)
{
ps.h.print("\\n\\t/* Imported methods from superclasses */\\n")
var newinterfacemethods = Set.empty[SootMethod]
for (i <- newinterfaces)
newinterfacemethods ++= getAllInterfaceMethods(i)
for (m <- newinterfacemethods)
{
val signature = m.getSubSignature
if (!sootclass.declaresMethod(signature))
{
val pm = getMethodRecursively(sootclass.getSuperclass, signature)
if (!getAllInterfaces(pm.getDeclaringClass).contains(m.getDeclaringClass))
{
if (!pm.isAbstract)
emitTrampoline(m, pm)
}
}
}
ps.h.print("\\n")
}
ps.h.print("\\t/* Method declarations */\\n")
ps.c.print("\\n/* Method definitions */\\n")
/* Emit destructor (required to make vtable be emitted...) */
ps.h.print("\\tpublic: virtual ~", sootclass.getShortName, "() {};\\n")
/* Ordinary methods */
for (m <- sootclass.getMethods)
{
translateMethodDeclaration(m)
if (m.hasActiveBody)
translateMethodDefinition(m)
}
/* GC marking of member variables. */
ps.h.print("\\n\\tprotected: void markImpl();\\n")
ps.c.print("void ", className(sootclass), "::markImpl()\\n")
ps.c.print("{\\n")
if (sootclass.hasSuperclass)
ps.c.print("\\t", className(sootclass.getSuperclass), "::markImpl();\\n")
for (f <- sootclass.getFields)
{
if (!f.isStatic && f.getType.isInstanceOf[RefLikeType])
{
ps.c.print("\\tif (", fieldName(f), ") ",
fieldName(f), "->mark();\\n")
}
}
ps.c.print("}\\n")
/* GC marking of class variables. */
ps.h.print("\\n")
ps.h.print("\\tpublic: class Marker : public ::com::cowlark::cowjac::ContainsGlobalReferences\\n")
ps.h.print("\\t{\\n")
ps.h.print("\\t\\tpublic: void mark();\\n")
ps.h.print("\\t};\\n")
ps.h.print("\\n")
ps.ch.print("\\n/* Class marker */\\n")
ps.ch.print("\\n")
ps.ch.print("static ", className(sootclass), "::Marker* marker = 0;\\n")
ps.ch.print("void ", className(sootclass), "::Marker::mark()\\n")
ps.ch.print("{\\n")
for (f <- sootclass.getFields)
{
if (f.isStatic && f.getType.isInstanceOf[RefLikeType])
{
ps.ch.print("\\tif (",
className(sootclass), "::", fieldName(f), ") ",
className(sootclass), "::", fieldName(f), "->mark();\\n")
}
}
ps.ch.print("}\\n")
/* Class initialisation. */
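		/* Both getClassConstant and classInit use double-checked locking
		 * under SystemLock, so initialisation runs at most once even with
		 * concurrent callers. */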
ps.c.print("\\n::java::lang::Class* ", className(sootclass),
"::getClassConstant(::com::cowlark::cowjac::Stackframe* F)\\n")
ps.c.print("{\\n")
ps.c.print("\\tstatic ::java::lang::Class* classConstant = 0;\\n")
ps.c.print("\\tclassInit(F);\\n")
ps.c.print("\\tif (!classConstant)\\n")
ps.c.print("\\t{\\n")
ps.c.print("\\t\\t::com::cowlark::cowjac::SystemLock lock;\\n")
ps.c.print("\\t\\tif (!classConstant)\\n")
ps.c.print("\\t\\t\\tclassConstant = new ::com::cowlark::cowjac::SimpleClass(F, \\"" +
sootclass.getName, "\\");\\n")
ps.c.print("\\t}\\n")
ps.c.print("\\treturn classConstant;\\n")
ps.c.print("}\\n")
ps.c.print("\\nvoid ", className(sootclass),
"::classInit(::com::cowlark::cowjac::Stackframe* F)\\n")
ps.c.print("{\\n")
ps.c.print("\\tstatic bool initialised = false;\\n")
ps.c.print("\\tif (!initialised)\\n")
ps.c.print("\\t{\\n")
ps.c.print("\\t\\t::com::cowlark::cowjac::SystemLock lock;\\n")
ps.c.print("\\t\\tif (!initialised)\\n")
ps.c.print("\\t\\t{\\n")
ps.c.print("\\t\\t\\tinitialised = true;\\n")
for (sc <- stringconstants)
{
ps.ch.print("\\nstatic const jchar scd", sc._2.toString, "[] = {")
if (!sc._1.isEmpty)
ps.ch.print(sc._1.map(_.toInt.toString).reduceLeft(_ + ", " + _))
ps.ch.print("};\\n")
ps.ch.print("static ::java::lang::String* sc", sc._2.toString, " = 0;\\n")
ps.ch.print("\\n")
ps.c.print("\\t\\t\\t::com::cowlark::cowjac::ScalarArray<jchar>* scda",
sc._2.toString, " = new ::com::cowlark::cowjac::ScalarArray<jchar>(",
"F, ::com::cowlark::cowjac::PrimitiveCharClassConstant->getArrayType(F), ",
sc._1.length.toString, ", (jchar*) scd", sc._2.toString, ");\\n")
ps.c.print("\\t\\t\\tsc", sc._2.toString, " = new ::java::lang::String;\\n")
ps.c.print("\\t\\t\\tsc", sc._2.toString, "->makeImmutable();\\n")
/* This initialises the string with an internal constructor, to avoid
* copying the array (which causes nasty recursion issues during
* startup). */
ps.c.print("\\t\\t\\tsc", sc._2.toString, "->m__3cinit_3e_5f_28II_5bC_29V(F, ",
"0, ", sc._1.length.toString, ", scda", sc._2.toString, ");\\n")
}
ps.c.print("\\t\\t\\tmarker = new ", className(sootclass), "::Marker();\\n")
if (sootclass.hasSuperclass)
ps.c.print("\\t\\t\\t", className(sootclass.getSuperclass), "::classInit(F);\\n")
for (i <- newinterfaces)
ps.c.print("\\t\\t\\t", className(i), "::classInit(F);\\n")
if (sootclass.declaresMethod("void <clinit>()"))
{
val m = sootclass.getMethod("void <clinit>()")
ps.c.print("\\t\\t\\t", className(sootclass), "::", methodName(m), "(F);\\n")
}
ps.c.print("\\t\\t}\\n")
ps.c.print("\\t}\\n")
ps.c.print("}\\n")
/* Header footer. */
ps.h.print("};\\n")
ps.h.print("\\n")
for (i <- 0 until nslevels.length-1)
ps.h.print("} /* namespace ", nslevels(nslevels.length-1-i), " */\\n")
ps.h.print("#endif\\n")
}
} | davidgiven/cowjac | compiler/src/com/cowlark/cowjac/Translator.scala | Scala | bsd-2-clause | 34,961 |
/*
* Copyright 2014-2022 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.atlas.chart.util
import java.awt.Color
import java.awt.Rectangle
import java.awt.RenderingHints
import java.awt.font.LineBreakMeasurer
import java.awt.font.TextAttribute
import java.awt.geom.AffineTransform
import java.awt.image.BufferedImage
import java.awt.image.RenderedImage
import java.io.ByteArrayInputStream
import java.io.ByteArrayOutputStream
import java.io.InputStream
import java.io.OutputStream
import java.text.AttributedString
import javax.imageio.IIOImage
import javax.imageio.ImageIO
import javax.imageio.ImageTypeSpecifier
import javax.imageio.ImageWriter
import javax.imageio.metadata.IIOMetadata
import javax.imageio.metadata.IIOMetadataFormatImpl
import javax.imageio.metadata.IIOMetadataNode
import scala.util.Using
import scala.util.Using.Releasable
object PngImage {
// Disable using on-disk cache for images. Avoids temp files on shared services.
ImageIO.setUseCache(false)
// Should we use antialiasing? This will typically need to be disabled for tests to
// get reliable image comparisons.
var useAntiAliasing: Boolean = true
def apply(bytes: Array[Byte]): PngImage = {
val input = new ByteArrayInputStream(bytes)
apply(input)
}
def apply(input: InputStream): PngImage = {
try {
val iterator = ImageIO.getImageReadersBySuffix("png")
require(iterator.hasNext, "no image readers for png")
val reader = iterator.next()
reader.setInput(ImageIO.createImageInputStream(input), true)
val index = 0
val metadata = reader.getImageMetadata(index)
val fields = extractTxtFields(metadata)
val image = reader.read(index)
PngImage(image, fields)
} finally {
input.close()
}
}
private def extractTxtFields(m: IIOMetadata): Map[String, String] = {
val elements = m
.getAsTree(IIOMetadataFormatImpl.standardMetadataFormatName)
.asInstanceOf[IIOMetadataNode]
.getElementsByTagName("TextEntry")
(0 until elements.getLength)
.map(i => elements.item(i).asInstanceOf[IIOMetadataNode])
.map(m => m.getAttribute("keyword") -> m.getAttribute("value"))
.toMap
}
def userError(imgText: String, width: Int, height: Int): PngImage = {
val userErrorYellow = new Color(0xFF, 0xCF, 0x00)
error(imgText, width, height, "USER ERROR:", Color.BLACK, userErrorYellow)
}
def systemError(imgText: String, width: Int, height: Int): PngImage = {
val systemErrorRed = new Color(0xF8, 0x20, 0x00)
error(imgText, width, height, "SYSTEM ERROR:", Color.WHITE, systemErrorRed)
}
def error(
imgText: String,
width: Int,
height: Int,
imgTextPrefix: String = "ERROR:",
imgTextColor: Color = Color.WHITE,
imgBackgroundColor: Color = Color.BLACK
): PngImage = {
val fullMsg = s"$imgTextPrefix $imgText"
val image = newBufferedImage(width, height)
val g = image.createGraphics
if (useAntiAliasing) {
g.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON)
}
// Try to avoid problems with different default fonts on various platforms. Java will use the
// "Dialog" font by default which can get mapped differently on various systems. It looks like
// passing a bad font name into the font constructor will just silently fall back to the
// default so it should still function if this font isn't present. Uses a default font that
// is included as part of this library.
val font = Fonts.default
g.setFont(font)
g.setPaint(imgBackgroundColor)
g.fill(new Rectangle(0, 0, width, height))
g.setPaint(imgTextColor)
val attrStr = new AttributedString(fullMsg)
attrStr.addAttribute(TextAttribute.FONT, font)
val iterator = attrStr.getIterator
val measurer = new LineBreakMeasurer(iterator, g.getFontRenderContext)
val wrap = width - 8.0f
var y = 0.0f
while (measurer.getPosition < fullMsg.length) {
val layout = measurer.nextLayout(wrap)
y += layout.getAscent
layout.draw(g, 4.0f, y)
y += layout.getDescent + layout.getLeading
}
PngImage(image, Map.empty)
}
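  /**
   * Compares two images pixel by pixel: pixels that differ, or exist in only
   * one image, are painted red and the diff count is stored in the metadata.
   */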
def diff(img1: RenderedImage, img2: RenderedImage): PngImage = {
val bi1 = newBufferedImage(img1)
val bi2 = newBufferedImage(img2)
val dw = List(img1.getWidth, img2.getWidth).max
val dh = List(img1.getHeight, img2.getHeight).max
val diffImg = newBufferedImage(dw, dh)
val g = diffImg.createGraphics
g.setPaint(Color.BLACK)
g.fill(new Rectangle(0, 0, dw, dh))
val red = Color.RED.getRGB
var count = 0
var x = 0
while (x < dw) {
var y = 0
while (y < dh) {
if (contains(bi1, x, y) && contains(bi2, x, y)) {
val c1 = bi1.getRGB(x, y)
val c2 = bi2.getRGB(x, y)
if (c1 != c2) {
diffImg.setRGB(x, y, red)
count += 1
}
} else {
diffImg.setRGB(x, y, red)
count += 1
}
y += 1
}
x += 1
}
val identical = (count == 0).toString
val diffCount = count.toString
val meta = Map("identical" -> identical, "diff-pixel-count" -> diffCount)
PngImage(diffImg, meta)
}
private def contains(img: RenderedImage, x: Int, y: Int): Boolean = {
x < img.getWidth && y < img.getHeight
}
private def newBufferedImage(w: Int, h: Int): BufferedImage = {
new BufferedImage(w, h, BufferedImage.TYPE_INT_ARGB)
}
private def newBufferedImage(img: RenderedImage): BufferedImage = {
img match {
case bi: BufferedImage => bi
case _ =>
val w = img.getWidth
val h = img.getHeight
val bi = new BufferedImage(w, h, BufferedImage.TYPE_INT_ARGB)
val g = bi.createGraphics
g.drawRenderedImage(img, new AffineTransform)
bi
}
}
implicit object ReleasableImageWriter extends Releasable[ImageWriter] {
def release(resource: ImageWriter): Unit = resource.dispose()
}
}
case class PngImage(data: RenderedImage, metadata: Map[String, String] = Map.empty) {
type JList = java.util.List[String]
def toByteArray: Array[Byte] = {
val buffer = new ByteArrayOutputStream
write(buffer)
buffer.toByteArray
}
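  /**
   * Writes the PNG to the stream, storing each metadata entry as a text
   * chunk (zip-compressed when the value exceeds 100 characters).
   */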
def write(output: OutputStream): Unit = {
import PngImage._
val iterator = ImageIO.getImageWritersBySuffix("png")
require(iterator.hasNext, "no image writers for png")
Using.resource(iterator.next()) { writer =>
Using.resource(ImageIO.createImageOutputStream(output)) { imageOutput =>
writer.setOutput(imageOutput)
val pngMeta = writer.getDefaultImageMetadata(new ImageTypeSpecifier(data), null)
metadata.foreachEntry { (k, v) =>
val textEntry = new IIOMetadataNode("TextEntry")
textEntry.setAttribute("keyword", k)
textEntry.setAttribute("value", v)
textEntry.setAttribute("compression", if (v.length > 100) "zip" else "none")
val text = new IIOMetadataNode("Text")
text.appendChild(textEntry)
val root = new IIOMetadataNode(IIOMetadataFormatImpl.standardMetadataFormatName)
root.appendChild(text)
pngMeta.mergeTree(IIOMetadataFormatImpl.standardMetadataFormatName, root)
}
val iioImage = new IIOImage(data, null, pngMeta)
writer.write(null, iioImage, null)
}
}
}
}
| Netflix/atlas | atlas-chart/src/main/scala/com/netflix/atlas/chart/util/PngImage.scala | Scala | apache-2.0 | 7,957 |
package net.ruippeixotog.structs
class BinomialQueueSpec extends PriorityQueueSpec[BinomialQueue] {
def queueName = "binomial queue"
} | ruippeixotog/functional-brodal-queues | src/test/scala/net/ruippeixotog/structs/BinomialQueueSpec.scala | Scala | mit | 137 |
/*
* Copyright 2017 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.ct600a.v3
import uk.gov.hmrc.ct.box.{Calculated, CtBoxIdentifier, CtOptionalInteger}
import uk.gov.hmrc.ct.ct600.v3.calculations.LoansToParticipatorsCalculator
import uk.gov.hmrc.ct.ct600a.v3.retriever.CT600ABoxRetriever
case class A75(value: Option[Int]) extends CtBoxIdentifier(name = "A75 - Total of all loans outstanding at end of return period - including all loans outstanding at the end of the return period, whether they were made in this period or an earlier one")
with CtOptionalInteger
object A75 extends Calculated[A75, CT600ABoxRetriever] with LoansToParticipatorsCalculator {
override def calculate(fieldValueRetriever: CT600ABoxRetriever): A75 = {
calculateA75(fieldValueRetriever.a15(), fieldValueRetriever.lp04())
}
}
| liquidarmour/ct-calculations | src/main/scala/uk/gov/hmrc/ct/ct600a/v3/A75.scala | Scala | apache-2.0 | 1,371 |
package com.datastax.spark.connector.util
import java.io.IOException
import com.datastax.driver.core.{PreparedStatement, Session}
import com.datastax.spark.connector.cql.{ColumnDef, TableDef}
import com.datastax.spark.connector.util.Quote._
import scala.collection.Seq
object PatitionKeyTools {
/**
* This query is only used to build a prepared statement so we can more easily extract
* partition tokens from tables. We prepare a statement of the form SELECT * FROM keyspace.table
* where x= .... This statement is never executed.
*/
private[connector] def querySelectUsingOnlyPartitionKeys(tableDef: TableDef): String = {
val partitionKeys = tableDef.partitionKey
def quotedColumnNames(columns: Seq[ColumnDef]) = partitionKeys.map(_.columnName).map(quote)
val whereClause = quotedColumnNames(partitionKeys).map(c => s"$c = :$c").mkString(" AND ")
s"SELECT * FROM ${quote(tableDef.keyspaceName)}.${quote(tableDef.tableName)} WHERE $whereClause"
}
private[connector] def prepareDummyStatement(session: Session, tableDef: TableDef): PreparedStatement = {
try {
session.prepare(querySelectUsingOnlyPartitionKeys(tableDef))
}
catch {
case t: Throwable =>
throw new IOException(
s"""Failed to prepare statement
| ${querySelectUsingOnlyPartitionKeys(tableDef)}: """.stripMargin + t.getMessage, t)
}
}
}
| maasg/spark-cassandra-connector | spark-cassandra-connector/src/main/scala/com/datastax/spark/connector/util/PatitionKeyTools.scala | Scala | apache-2.0 | 1,404 |
import akka.actor._
import btc.common.UserManagerMessages.LookupUser
import btc.common.WebSocketHandlerMessages.InitActorResponse
import scala.concurrent.duration.FiniteDuration
object UsersManager {
def props(broadcaster: ActorRef, keepAliveTimeout: FiniteDuration) = Props(new UsersManager(broadcaster, keepAliveTimeout))
}
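/** Creates one UserHandler per LookupUser request and schedules recurring keep-alive ticks for it. */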
class UsersManager(broadcaster: ActorRef, keepAliveTimeout: FiniteDuration) extends Actor with ActorLogging {
override def receive: Receive = {
case LookupUser(id) =>
log.info(s"Got user lookup request with id $id")
val userHandler = context.actorOf(UserHandler.props(id, sender(), broadcaster, keepAliveTimeout))
context.system.scheduler.schedule(keepAliveTimeout * 3, keepAliveTimeout, userHandler, UserHandler.KeepAlive)(context.system.dispatcher)
sender() ! InitActorResponse(userHandler)
}
} | ldrygala/reactive-microservices | btc-users/src/main/scala/UsersManager.scala | Scala | mit | 858 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.eagle.datastream.storm
import java.util
import backtype.storm.spout.SpoutOutputCollector
import backtype.storm.task.TopologyContext
import backtype.storm.topology.OutputFieldsDeclarer
import backtype.storm.topology.base.BaseRichSpout
import backtype.storm.tuple.Fields
import backtype.storm.utils.Utils
import org.apache.eagle.datastream.core.StreamInfo
import org.apache.eagle.datastream.utils.NameConstants
import org.slf4j.LoggerFactory
import scala.collection.JavaConverters._
case class IteratorStreamSpout(iterator: Iterator[Any])(implicit info:StreamInfo) extends BaseRichSpout {
  val LOG = LoggerFactory.getLogger(classOf[IteratorStreamSpout])
var _collector:SpoutOutputCollector=null
var _iterator:Iterator[Any] = null
override def open(conf: util.Map[_, _], context: TopologyContext, collector: SpoutOutputCollector): Unit = {
this._collector = collector
this._iterator = iterator
}
override def nextTuple(): Unit = {
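    // Keyed streams are emitted as (key, value) pairs; unkeyed streams as single-field tuples.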
if(_iterator.hasNext){
val current = _iterator.next().asInstanceOf[AnyRef]
if(info.outKeyed) {
_collector.emit(List(info.keySelector.key(current),current).asJava.asInstanceOf[util.List[AnyRef]])
}else{
_collector.emit(List(current).asJava)
}
}else{
LOG.info("No tuple left, sleep forever")
this.deactivate()
Utils.sleep(Long.MaxValue)
}
}
override def declareOutputFields(declarer: OutputFieldsDeclarer): Unit = {
if(info.outKeyed) {
declarer.declare(new Fields(NameConstants.FIELD_KEY,NameConstants.FIELD_VALUE))
}else{
declarer.declare(new Fields(s"${NameConstants.FIELD_PREFIX}0"))
}
}
}
| Jashchahal/incubator-eagle | eagle-core/eagle-data-process/eagle-stream-process-api/src/main/scala/org/apache/eagle/datastream/storm/IteratorStreamSpout.scala | Scala | apache-2.0 | 2,463 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License
*/
package org.apache.toree.communication.socket
import org.zeromq.ZMQ
/**
* Represents the type option used to indicate the type of socket to create.
*
* @param `type` The type as an integer
*/
sealed class SocketType(val `type`: Int)
/** Represents a publish socket. */
case object PubSocket extends SocketType(ZMQ.PUB)
/** Represents a subscribe socket. */
case object SubSocket extends SocketType(ZMQ.SUB)
/** Represents a reply socket. */
case object RepSocket extends SocketType(ZMQ.REP)
/** Represents a request socket. */
case object ReqSocket extends SocketType(ZMQ.REQ)
/** Represents a router socket. */
case object RouterSocket extends SocketType(ZMQ.ROUTER)
/** Represents a dealer socket. */
case object DealerSocket extends SocketType(ZMQ.DEALER)
| Myllyenko/incubator-toree | communication/src/main/scala/org/apache/toree/communication/socket/SocketType.scala | Scala | apache-2.0 | 1,584 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.oap.adapter
import org.apache.spark.rdd.InputFileBlockHolder
import org.apache.spark.unsafe.types.UTF8String
object InputFileNameHolderAdapter {
/**
* Returns the holding file name or empty string if it is unknown.
*/
def getInputFileName(): UTF8String = InputFileBlockHolder.getInputFilePath
}
| Intel-bigdata/OAP | oap-cache/oap/src/main/scala/org/apache/spark/sql/oap/adapter/InputFileNameHolderAdapter.scala | Scala | apache-2.0 | 1,138 |
package io.chymyst.test
import org.scalatest.{Args, FlatSpec, Matchers, Status}
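/** Base spec that logs wall-clock start and finish times, plus the duration, of every test. */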
class LogSpec extends FlatSpec with Matchers {
protected override def runTest(testName: String, args: Args): Status = {
val initTime = System.currentTimeMillis()
println(s"*** Starting test ($initTime): $testName")
val result = super.runTest(testName, args)
println(s"*** Finished test ($initTime): $testName in ${System.currentTimeMillis() - initTime} ms")
result
}
}
| Chymyst/chymyst-core | core/src/test/scala/io/chymyst/test/LogSpec.scala | Scala | apache-2.0 | 476 |
package org.phenoscape.kb.matrix.reports
import org.phenoscape.io.NeXMLReader
import org.phenoscape.model.MultipleState
import org.phenoscape.model.Association
import org.phenoscape.model.AssociationSupport
import org.phenoscape.model.Taxon
import org.phenoscape.model.State
import org.phenoscape.model.Character
import org.obo.datamodel.impl.OBOSessionImpl
import java.io.File
import scala.collection.JavaConversions._
object CountPopulatedCells extends App {
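  /* Reads a NeXML character matrix and prints summary counts, separating
   * asserted (direct) cell support from inferred support. */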
val matrixFile = args(0)
val dataset = new NeXMLReader(new File(matrixFile), new OBOSessionImpl()).getDataSet
val associations = dataset.getAssociationSupport
println("Number of populated cells: " + associations.keys.size)
val assertedAssociations = associations filter { case (association, supports) => supports exists (_.isDirect) }
println("Number of asserted cells: " + assertedAssociations.keys.size)
val groupedByCharacter = associations groupBy { case (association, supports) => association.getCharacterID }
val assertedGroupedByCharacter = assertedAssociations groupBy { case (association, supports) => association.getCharacterID }
val charactersWithBothPresenceAndAbsenceAsserted = assertedGroupedByCharacter.filter {
case (characterID, associations) => (associations.keys exists (_.getStateID.endsWith("1"))) && (associations.keys exists (_.getStateID.endsWith("0")))
}
println("Number of characters with both presence and absence asserted: " + charactersWithBothPresenceAndAbsenceAsserted.keys.size)
val cellsForInformativelyAssertedCharacters = charactersWithBothPresenceAndAbsenceAsserted.flatMap{case (characterID, associations) => associations.keys}.toSet
println("Number of cells populated in informatively asserted characters: " + cellsForInformativelyAssertedCharacters.size)
val taxaForInformativelyAssertedCharacters = cellsForInformativelyAssertedCharacters.map(_.getTaxonID).toSet
println("Number of taxa for informatively asserted characters: " + taxaForInformativelyAssertedCharacters.size)
val charactersWithoutAssertedCells = groupedByCharacter filterNot {
case (characterID, associations) => (associations.values.flatten exists (_.isDirect))
}
println("Number of characters with no asserted cells: " + charactersWithoutAssertedCells.keys.size)
val idToCharacterLabel = (dataset.getCharacters map { c => (c.getNexmlID -> c.getLabel) }).toMap
val presenceAssociations = associations filter { case (association, supports) => association.getStateID.endsWith("1") }
println("Number of presence associations: " + presenceAssociations.keys.size)
val absenceAssociations = associations filter { case (association, supports) => association.getStateID.endsWith("0") }
println("Number of absence associations: " + absenceAssociations.keys.size)
val presencesGroupedByCharacter = presenceAssociations groupBy { case (association, supports) => association.getCharacterID }
val absencesGroupedByCharacter = absenceAssociations groupBy { case (association, supports) => association.getCharacterID }
val charactersWithAssertedPresences = presencesGroupedByCharacter filter {
case (characterID, associations) => (associations.values.flatten exists (_.isDirect))
}
val charactersWithoutAssertedPresences = presencesGroupedByCharacter filterNot {
case (characterID, associations) => (associations.values.flatten exists (_.isDirect))
}
val charactersWithAssertedAbsences = absencesGroupedByCharacter filter {
case (characterID, associations) => (associations.values.flatten exists (_.isDirect))
}
val charactersWithoutAssertedAbsences = absencesGroupedByCharacter filterNot {
case (characterID, associations) => (associations.values.flatten exists (_.isDirect))
}
val charactersMadeInformativeByInferredAbsences = charactersWithAssertedPresences.keys.toSet & charactersWithoutAssertedAbsences.keys.toSet
val charactersMadeInformativeByInferredPresences = charactersWithAssertedAbsences.keys.toSet & charactersWithoutAssertedPresences.keys.toSet
println("Number of characters made informative by inferred absences: " + charactersMadeInformativeByInferredAbsences.size)
println("Number of characters made informative by inferred presences: " + charactersMadeInformativeByInferredPresences.size)
} | phenoscape/ontotrace | src/main/scala/org/phenoscape/kb/matrix/reports/CountPopulatedCells.scala | Scala | mit | 4,267 |
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
package com.jspha.maia.generic
import scala.language.higherKinds
import io.circe._
import shapeless._
import shapeless.labelled._
import com.jspha.maia._
import scala.collection.immutable.HashMap
trait ResponseEncoder[Api[_ <: Fields]] extends Encoder[Response[Api]]
object ResponseEncoder {
implicit def ResponseEncoderGeneric[Api[_ <: Fields], Repr <: HList](
implicit gen: LabelledGeneric.Aux[Response[Api], Repr],
worker: Lazy[Worker[Repr]]
): ResponseEncoder[Api] =
new ResponseEncoder[Api] {
def apply(a: Response[Api]): Json =
Encoder
.encodeMapLike[HashMap, Symbol, Json]
.apply(worker.value(gen.to(a)))
}
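  /**
   * Folds the labelled HList form of a Response into a field-name -> Json
   * map, omitting fields that carry no data (None or an empty index map).
   */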
trait Worker[Repr <: HList] {
def apply(r: Repr): HashMap[Symbol, Json]
}
object Worker {
implicit val WorkerHNil: Worker[HNil] = _ => HashMap()
implicit val NothingEncoder: Encoder[Nothing] =
(a: Nothing) => a.asInstanceOf[Json]
def EAEncoder[E, A](implicit aEncoder: Encoder[A],
eEncoder: Encoder[E]): Encoder[Either[E, A]] =
Encoder.encodeEither("err", "ok")(eEncoder, aEncoder)
implicit def WorkerAtomE[K <: Symbol, A, E, T <: HList](
implicit recur: Worker[T],
aEncoder: Encoder[A],
eEncoder: Encoder[E],
kWitness: Witness.Aux[K]
): Worker[FieldType[K, Fields.Response#AtomE[E, A]] :: T] =
(r: FieldType[K, Fields.Response#AtomE[E, A]] :: T) => {
val later: HashMap[Symbol, Json] = recur(r.tail)
val head: Option[Either[E, A]] = r.head
head match {
case None => later
case Some(res) =>
later + (kWitness.value -> EAEncoder[E, A].apply(res))
}
}
/**
* We need to provide this Nothing-ed instance explicitly in order to
* relieve the search process from finding Encoder[Nothing].
*/
implicit def WorkerAtomE_NoError[K <: Symbol, A, T <: HList](
implicit recur: Worker[T],
aEncoder: Encoder[A],
kWitness: Witness.Aux[K]
): Worker[FieldType[K, Fields.Response#AtomE[Nothing, A]] :: T] =
WorkerAtomE[K, A, Nothing, T](recur, aEncoder, NothingEncoder, kWitness)
implicit def WorkerIAtomE[K <: Symbol, A, I, E, T <: HList](
implicit recur: Worker[T],
aEncoder: Encoder[A],
eEncoder: Encoder[E],
iEncoder: KeyEncoder[I],
kWitness: Witness.Aux[K]
): Worker[FieldType[K, Fields.Response#IAtomE[I, E, A]] :: T] =
(r: FieldType[K, Fields.Response#IAtomE[I, E, A]] :: T) => {
val later: HashMap[Symbol, Json] = recur(r.tail)
val head: HashMap[I, Either[E, A]] = r.head
val enc: ObjectEncoder[HashMap[I, Either[E, A]]] =
Encoder.encodeMapLike[HashMap, I, Either[E, A]](
iEncoder,
EAEncoder[E, A]
)
val json = enc(head)
if (head.isEmpty) later else later + (kWitness.value -> json)
}
implicit def WorkerIAtomE_NoError[K <: Symbol, A, I, T <: HList](
implicit recur: Worker[T],
aEncoder: Encoder[A],
iEncoder: KeyEncoder[I],
kWitness: Witness.Aux[K]
): Worker[FieldType[K, Fields.Response#IAtomE[I, Nothing, A]] :: T] =
WorkerIAtomE[K, A, I, Nothing, T](recur,
aEncoder,
NothingEncoder,
iEncoder,
kWitness)
implicit def WorkerObjE[K <: Symbol,
A[_ <: Fields],
E,
C <: Cardinality,
T <: HList](
implicit recur: Worker[T],
eEncoder: Encoder[E],
aEncoder: Lazy[ResponseEncoder[A]],
collOps: Cardinality.Ops[C],
kWitness: Witness.Aux[K]
): Worker[FieldType[K, Fields.Response#ObjE[C, E, A]] :: T] =
(r: FieldType[K, Fields.Response#ObjE[C, E, A]] :: T) => {
val later: HashMap[Symbol, Json] = recur(r.tail)
val head: Option[Either[E, C#Coll[Response[A]]]] =
r.head
head match {
case None => later
case Some(subResp) =>
val json: Json = EAEncoder[E, C#Coll[Response[A]]](
collOps.encoder(aEncoder.value),
eEncoder
).apply(subResp)
later + (kWitness.value -> json)
}
}
implicit def WorkerObjE_One[K <: Symbol, A[_ <: Fields], E, T <: HList](
implicit recur: Worker[T],
eEncoder: Encoder[E],
aEncoder: Lazy[ResponseEncoder[A]],
collOps: Cardinality.Ops[Cardinality.One],
kWitness: Witness.Aux[K]
): Worker[FieldType[K, Fields.Response#ObjE[Cardinality.One, E, A]] :: T] =
WorkerObjE[K, A, E, Cardinality.One, T](recur,
eEncoder,
aEncoder,
collOps,
kWitness)
implicit def WorkerObjE_Opt[K <: Symbol, A[_ <: Fields], E, T <: HList](
implicit recur: Worker[T],
eEncoder: Encoder[E],
aEncoder: Lazy[ResponseEncoder[A]],
collOps: Cardinality.Ops[Cardinality.Opt],
kWitness: Witness.Aux[K]
): Worker[FieldType[K, Fields.Response#ObjE[Cardinality.Opt, E, A]] :: T] =
WorkerObjE[K, A, E, Cardinality.Opt, T](recur,
eEncoder,
aEncoder,
collOps,
kWitness)
implicit def WorkerObjE_Many[K <: Symbol, A[_ <: Fields], E, T <: HList](
implicit recur: Worker[T],
eEncoder: Encoder[E],
aEncoder: Lazy[ResponseEncoder[A]],
collOps: Cardinality.Ops[Cardinality.Many],
kWitness: Witness.Aux[K]
): Worker[FieldType[K, Fields.Response#ObjE[Cardinality.Many, E, A]] :: T] =
WorkerObjE[K, A, E, Cardinality.Many, T](recur,
eEncoder,
aEncoder,
collOps,
kWitness)
implicit def WorkerObjE_One_Nothing[K <: Symbol,
A[_ <: Fields],
T <: HList](
implicit recur: Worker[T],
aEncoder: Lazy[ResponseEncoder[A]],
collOps: Cardinality.Ops[Cardinality.One],
kWitness: Witness.Aux[K]
): Worker[FieldType[K, Fields.Response#ObjE[Cardinality.One, Nothing, A]]
:: T] =
WorkerObjE[K, A, Nothing, Cardinality.One, T](recur,
NothingEncoder,
aEncoder,
collOps,
kWitness)
implicit def WorkerObjE_Opt_Nothing[K <: Symbol,
A[_ <: Fields],
T <: HList](
implicit recur: Worker[T],
aEncoder: Lazy[ResponseEncoder[A]],
collOps: Cardinality.Ops[Cardinality.Opt],
kWitness: Witness.Aux[K]
): Worker[FieldType[K, Fields.Response#ObjE[Cardinality.Opt, Nothing, A]]
:: T] =
WorkerObjE[K, A, Nothing, Cardinality.Opt, T](recur,
NothingEncoder,
aEncoder,
collOps,
kWitness)
implicit def WorkerObjE_Many_Nothing[K <: Symbol,
A[_ <: Fields],
T <: HList](
implicit recur: Worker[T],
aEncoder: Lazy[ResponseEncoder[A]],
collOps: Cardinality.Ops[Cardinality.Many],
kWitness: Witness.Aux[K]
): Worker[FieldType[K, Fields.Response#ObjE[Cardinality.Many, Nothing, A]]
:: T] =
WorkerObjE[K, A, Nothing, Cardinality.Many, T](recur,
NothingEncoder,
aEncoder,
collOps,
kWitness)
implicit def WorkerIObjE[K <: Symbol,
A[_ <: Fields],
I,
E,
C <: Cardinality,
T <: HList](
implicit recur: Worker[T],
eEncoder: Encoder[E],
aEncoder: Lazy[ResponseEncoder[A]],
iEncoder: KeyEncoder[I],
collOps: Cardinality.Ops[C],
kWitness: Witness.Aux[K]
): Worker[FieldType[K, Fields.Response#IObjE[I, C, E, A]] :: T] =
(r: FieldType[K, Fields.Response#IObjE[I, C, E, A]] :: T) => {
val later: HashMap[Symbol, Json] = recur(r.tail)
val head: HashMap[I, Either[E, C#Coll[Response[A]]]] =
r.head
if (head.isEmpty)
later
else {
val enc =
Encoder.encodeMapLike[HashMap, I, Either[E, C#Coll[Response[A]]]](
iEncoder,
EAEncoder[E, C#Coll[Response[A]]](
collOps.encoder(aEncoder.value),
eEncoder
)
)
later + (kWitness.value -> enc.apply(head))
}
}
implicit def WorkerIObjE_One[K <: Symbol,
A[_ <: Fields],
I,
E,
T <: HList](
implicit recur: Worker[T],
eEncoder: Encoder[E],
aEncoder: Lazy[ResponseEncoder[A]],
collOps: Cardinality.Ops[Cardinality.One],
iEncoder: KeyEncoder[I],
kWitness: Witness.Aux[K]
): Worker[FieldType[K, Fields.Response#IObjE[I, Cardinality.One, E, A]] ::
T] =
WorkerIObjE[K, A, I, E, Cardinality.One, T](recur,
eEncoder,
aEncoder,
iEncoder,
collOps,
kWitness)
implicit def WorkerIObjE_Opt[K <: Symbol,
A[_ <: Fields],
I,
E,
T <: HList](
implicit recur: Worker[T],
eEncoder: Encoder[E],
aEncoder: Lazy[ResponseEncoder[A]],
iEncoder: KeyEncoder[I],
collOps: Cardinality.Ops[Cardinality.Opt],
kWitness: Witness.Aux[K]
): Worker[FieldType[K, Fields.Response#IObjE[I, Cardinality.Opt, E, A]] ::
T] =
WorkerIObjE[K, A, I, E, Cardinality.Opt, T](recur,
eEncoder,
aEncoder,
iEncoder,
collOps,
kWitness)
implicit def WorkerIObjE_Many[K <: Symbol,
A[_ <: Fields],
I,
E,
T <: HList](
implicit recur: Worker[T],
eEncoder: Encoder[E],
aEncoder: Lazy[ResponseEncoder[A]],
iEncoder: KeyEncoder[I],
collOps: Cardinality.Ops[Cardinality.Many],
kWitness: Witness.Aux[K]
): Worker[
FieldType[K, Fields.Response#IObjE[I, Cardinality.Many, E, A]] :: T] =
WorkerIObjE[K, A, I, E, Cardinality.Many, T](recur,
eEncoder,
aEncoder,
iEncoder,
collOps,
kWitness)
implicit def WorkerIObjE_One_Nothing[K <: Symbol,
A[_ <: Fields],
I,
T <: HList](
implicit recur: Worker[T],
aEncoder: Lazy[ResponseEncoder[A]],
iEncoder: KeyEncoder[I],
collOps: Cardinality.Ops[Cardinality.One],
kWitness: Witness.Aux[K]
): Worker[
FieldType[K, Fields.Response#IObjE[I, Cardinality.One, Nothing, A]]
:: T] =
WorkerIObjE[K, A, I, Nothing, Cardinality.One, T](recur,
NothingEncoder,
aEncoder,
iEncoder,
collOps,
kWitness)
implicit def WorkerIObjE_Opt_Nothing[K <: Symbol,
A[_ <: Fields],
I,
T <: HList](
implicit recur: Worker[T],
aEncoder: Lazy[ResponseEncoder[A]],
iEncoder: KeyEncoder[I],
collOps: Cardinality.Ops[Cardinality.Opt],
kWitness: Witness.Aux[K]
): Worker[
FieldType[K, Fields.Response#IObjE[I, Cardinality.Opt, Nothing, A]]
:: T] =
WorkerIObjE[K, A, I, Nothing, Cardinality.Opt, T](recur,
NothingEncoder,
aEncoder,
iEncoder,
collOps,
kWitness)
implicit def WorkerIObjE_Many_Nothing[K <: Symbol,
A[_ <: Fields],
I,
T <: HList](
implicit recur: Worker[T],
aEncoder: Lazy[ResponseEncoder[A]],
iEncoder: KeyEncoder[I],
collOps: Cardinality.Ops[Cardinality.Many],
kWitness: Witness.Aux[K]
): Worker[
FieldType[K, Fields.Response#IObjE[I, Cardinality.Many, Nothing, A]]
:: T] =
WorkerIObjE[K, A, I, Nothing, Cardinality.Many, T](recur,
NothingEncoder,
aEncoder,
iEncoder,
collOps,
kWitness)
}
}
| tel/scala-maia | maia/src/main/scala-2.12/com/jspha/maia/generic/ResponseEncoder.scala | Scala | mpl-2.0 | 15,091 |
package $package$.default
import sri.universal.styles.InlineStyleSheetUniversal
object GlobalStyles extends InlineStyleSheetUniversal {
import dsl._
val wholeContainer = style(flex := 1, padding := 20)
val defaultHeader = style(backgroundColor := "#E91E63")
val defaultCardStyle = style(backgroundColor := "rgb(243, 241, 241)")
}
| scalajs-react-interface/mobile.g8 | src/main/g8/src/main/scala/$package$/default/GlobalStyles.scala | Scala | apache-2.0 | 345 |
import sbt._
import Keys._
import org.scalatra.sbt._
import com.typesafe.sbteclipse.plugin.EclipsePlugin.EclipseKeys
import play.twirl.sbt.SbtTwirl
import play.twirl.sbt.Import.TwirlKeys._
import sbtassembly._
import sbtassembly.AssemblyKeys._
object MyBuild extends Build {
val Organization = "gitbucket"
val Name = "gitbucket"
val Version = "3.6.0"
val ScalaVersion = "2.11.6"
val ScalatraVersion = "2.3.1"
lazy val project = Project (
"gitbucket",
file(".")
)
.settings(ScalatraPlugin.scalatraWithJRebel: _*)
.settings(
test in assembly := {},
assemblyMergeStrategy in assembly := {
case PathList("META-INF", xs @ _*) =>
(xs map {_.toLowerCase}) match {
case ("manifest.mf" :: Nil) => MergeStrategy.discard
case _ => MergeStrategy.discard
}
case x => MergeStrategy.first
}
)
.settings(
sourcesInBase := false,
organization := Organization,
name := Name,
version := Version,
scalaVersion := ScalaVersion,
resolvers ++= Seq(
Classpaths.typesafeReleases,
"amateras-repo" at "http://amateras.sourceforge.jp/mvn/",
"amateras-snapshot-repo" at "http://amateras.sourceforge.jp/mvn-snapshot/"
),
scalacOptions := Seq("-deprecation", "-language:postfixOps"),
libraryDependencies ++= Seq(
"org.eclipse.jgit" % "org.eclipse.jgit.http.server" % "3.4.2.201412180340-r",
"org.eclipse.jgit" % "org.eclipse.jgit.archive" % "3.4.2.201412180340-r",
"org.scalatra" %% "scalatra" % ScalatraVersion,
"org.scalatra" %% "scalatra-specs2" % ScalatraVersion % "test",
"org.scalatra" %% "scalatra-json" % ScalatraVersion,
"org.json4s" %% "json4s-jackson" % "3.2.11",
"jp.sf.amateras" %% "scalatra-forms" % "0.1.0",
"commons-io" % "commons-io" % "2.4",
"io.github.gitbucket" % "markedj" % "1.0.1",
"org.apache.commons" % "commons-compress" % "1.9",
"org.apache.commons" % "commons-email" % "1.3.3",
"org.apache.httpcomponents" % "httpclient" % "4.3.6",
"org.apache.sshd" % "apache-sshd" % "0.11.0",
"com.typesafe.slick" %% "slick" % "2.1.0",
"com.novell.ldap" % "jldap" % "2009-10-07",
"org.liquibase" % "liquibase-core" % "3.4.1",
"com.h2database" % "h2" % "1.4.180",
// "ch.qos.logback" % "logback-classic" % "1.0.13" % "runtime",
"org.eclipse.jetty" % "jetty-webapp" % "8.1.16.v20140903" % "container;provided",
"org.eclipse.jetty.orbit" % "javax.servlet" % "3.0.0.v201112011016" % "container;provided;test" artifacts Artifact("javax.servlet", "jar", "jar"),
"junit" % "junit" % "4.12" % "test",
"com.mchange" % "c3p0" % "0.9.5",
"com.typesafe" % "config" % "1.2.1",
"com.typesafe.akka" %% "akka-actor" % "2.3.10",
"com.enragedginger" %% "akka-quartz-scheduler" % "1.3.0-akka-2.3.x" exclude("c3p0","c3p0")
),
play.twirl.sbt.Import.TwirlKeys.templateImports += "gitbucket.core._",
EclipseKeys.withSource := true,
javacOptions in compile ++= Seq("-target", "7", "-source", "7"),
testOptions in Test += Tests.Argument(TestFrameworks.Specs2, "junitxml", "console"),
javaOptions in Test += "-Dgitbucket.home=target/gitbucket_home_for_test",
testOptions in Test += Tests.Setup( () => new java.io.File("target/gitbucket_home_for_test").mkdir() ),
fork in Test := true,
packageOptions += Package.MainClass("JettyLauncher")
).enablePlugins(SbtTwirl)
}
| beni55/gitbucket | project/build.scala | Scala | apache-2.0 | 3,454 |
class TwoParamSpecializedWithDefault[@specialized A, @specialized B](a: A, b: B = (??? : B))
| AlexSikia/dotty | tests/untried/pos/t7329.scala | Scala | bsd-3-clause | 93 |
package nlpdata.datasets
import nlpdata.structure._
import nlpdata.datasets.ptb._
import nlpdata.util._
package object nombank {
case class SpanIndicator(leftIndex: Int, height: Int) {
def upOne = copy(height = this.height + 1)
}
// for now, ignoring different kinds of linking... TODO do this perhaps
case class LinkedSpanIndicator(label: String, spanIndicators: List[SpanIndicator])
case class NomBankEntry(
ptbSentencePath: PTBSentencePath,
headIndex: Int,
predLemma: String,
framesetId: String,
argSpanIndicators: List[LinkedSpanIndicator]
)
// expect PTB trees, i.e., with -NONE-s and such
// also does not do anything interesting to linked spans; puts them together under one label
def getPredicateArgumentStructure(entry: NomBankEntry, refTree: SyntaxTree) =
entry match {
case NomBankEntry(ptbSentencePath, headIndex, predLemma, framesetId, argSpanIndicators) =>
val words = refTree.words
val head = words(headIndex)
val predicate = Predicate(head, predLemma, framesetId)
val argumentSpans = argSpanIndicators.map {
case LinkedSpanIndicator(label, spanIndicators) =>
val words = refTree
.foldUnlabeled(w => (List.empty[Word], SpanIndicator(w.index, 0), List(w))) {
children =>
val oldWords = children.flatMap(_._1)
val newWords = children
.filter(t => spanIndicators.contains(t._2))
.flatMap(_._3)
val newIndicator = children.head._2.upOne
val allWords = children.flatMap(_._3)
(oldWords ++ newWords, newIndicator, allWords)
}
._1
ArgumentSpan(label, words)
}
PredicateArgumentStructure(predicate, argumentSpans)
}
def getPredicateArgumentStructureReindexed(entry: NomBankEntry, refTree: SyntaxTree) = {
val pas = getPredicateArgumentStructure(entry, refTree)
// reindex it
case class IndexMappingState(curMapping: List[Option[Int]], nextIndex: Int) {
def noToken = this.copy(curMapping = None :: this.curMapping)
def yesToken =
IndexMappingState(Some(nextIndex) :: curMapping, nextIndex + 1)
}
val mapping = refTree.words
.foldLeft(IndexMappingState(Nil, 0)) {
case (acc, word) =>
if (word.pos == "-NONE-") acc.noToken
else acc.yesToken
}
.curMapping
.reverse
.toVector
def mapWord(w: Word): Option[Word] =
mapping(w.index).map(i => w.copy(index = i))
val newHeadWord = mapWord(pas.pred.head).get // should always be present
val newArgs = pas.arguments.flatMap { arg =>
arg.words
.flatMap(mapWord)
.onlyIf(_.nonEmpty)
.map(ArgumentSpan(arg.label, _))
}
PredicateArgumentStructure(pas.pred.copy(head = newHeadWord), newArgs)
}
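  // Illustrative sketch (not part of the original file): the fold above assigns
  // surface indices only to non-trace tokens, so POS tags Seq("NN", "-NONE-", "VB")
  // produce the mapping Vector(Some(0), None, Some(1)). A hypothetical standalone
  // version of that fold, for clarity:
  private def exampleIndexMapping(posTags: Seq[String]): Vector[Option[Int]] =
    posTags.foldLeft((List.empty[Option[Int]], 0)) {
      case ((mapping, next), pos) =>
        if (pos == "-NONE-") (None :: mapping, next) // trace token: no surface index
        else (Some(next) :: mapping, next + 1)       // real token: gets the next index
    }._1.reverse.toVector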
/** Provides parsing of argument spans. */
object Parsing {
val PTBString = """wsj/(.*)""".r
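    // Illustrative (hypothetical line, field values invented): an entry such as
    //   "wsj/00/wsj_0012.mrg 5 9 gift 01 5:2-ARG0 9:0-rel"
    // parses as path, sentence number, head index, predicate lemma, frameset id,
    // and then one linked span indicator per remaining token.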
def readEntry(line: String) = line.split(" ").toList match {
case PTBString(pathSuffix) :: IntMatch(sentenceNum) :: IntMatch(headIndex) :: predLemma :: framesetId :: argStrings =>
val arguments = argStrings.map { argStr =>
val (spansStr, label) = argStr.span(_ != '-')
          val spanStrings = spansStr.split("[\\*;,]").toList
val spans = spanStrings.map(
_.split(":").toList match {
case IntMatch(leftIndex) :: IntMatch(height) :: Nil =>
SpanIndicator(leftIndex, height)
}
)
LinkedSpanIndicator(label.tail, spans)
}
NomBankEntry(
PTBSentencePath(PTBPath(pathSuffix.toUpperCase), sentenceNum),
headIndex,
predLemma,
framesetId,
arguments
)
}
}
}
| julianmichael/nlpdata | nlpdata/src/nlpdata/datasets/nombank/package.scala | Scala | mit | 3,870 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.s2graph.core.features
import java.util
import java.util.Optional
import scala.collection.JavaConversions._
import org.apache.tinkerpop.gremlin.structure.Graph
class S2GraphVariables extends Graph.Variables {
import scala.collection.mutable
private val variables = mutable.Map.empty[String, Any]
override def set(key: String, value: scala.Any): Unit = {
if (key == null) throw Graph.Variables.Exceptions.variableKeyCanNotBeNull()
if (key.isEmpty) throw Graph.Variables.Exceptions.variableKeyCanNotBeEmpty()
if (value == null) throw Graph.Variables.Exceptions.variableValueCanNotBeNull()
variables.put(key, value)
}
override def keys(): util.Set[String] = variables.keySet
override def remove(key: String): Unit = {
if (key == null) throw Graph.Variables.Exceptions.variableKeyCanNotBeNull()
if (key.isEmpty) throw Graph.Variables.Exceptions.variableKeyCanNotBeEmpty()
variables.remove(key)
}
override def get[R](key: String): Optional[R] = {
if (key == null) throw Graph.Variables.Exceptions.variableKeyCanNotBeNull()
if (key.isEmpty) throw Graph.Variables.Exceptions.variableKeyCanNotBeEmpty()
variables.get(key) match {
case None => Optional.empty()
case Some(value) => if (value == null) Optional.empty() else Optional.of(value.asInstanceOf[R])
}
}
override def toString: String = {
s"variables[size:${variables.keys.size()}]"
}
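  // Usage sketch (illustrative, not part of the original file):
  //   val vars = new S2GraphVariables
  //   vars.set("owner", "s2graph")  // stores the pair after null/empty checks
  //   vars.get[String]("owner")     // Optional.of("s2graph")
  //   vars.remove("owner")          // keys() no longer contains "owner"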
}
| SteamShon/incubator-s2graph | s2core/src/main/scala/org/apache/s2graph/core/features/S2GraphVariables.scala | Scala | apache-2.0 | 2,254 |
package com.sksamuel.elastic4s.analysis
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers
class CustomAnalyzerBuilderTest extends AnyFunSuite with Matchers {
test("custom analyzer builder with all standard options") {
val a = CustomAnalyzer("my_custom_analyzer", "uaxurl", Nil, Nil)
CustomAnalyzerBuilder.build(a).string() shouldBe """{"type":"custom","tokenizer":"uaxurl"}"""
}
}
| stringbean/elastic4s | elastic4s-core/src/test/scala/com/sksamuel/elastic4s/analysis/CustomAnalyzerBuilderTest.scala | Scala | apache-2.0 | 435 |
/*
* Copyright (c) 2013. Regents of the University of California
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package edu.berkeley.cs.amplab.adam.algorithms.realignmenttarget
import edu.berkeley.cs.amplab.adam.avro.{ADAMPileup, ADAMRecord}
import edu.berkeley.cs.amplab.adam.rich.RichADAMRecord._
import scala.collection.immutable.{TreeSet, HashSet, NumericRange}
import com.esotericsoftware.kryo.{Kryo, Serializer}
import com.esotericsoftware.kryo.io.{Input, Output}
import org.apache.spark.Logging
import scala.util.Sorting.quickSort
object ZippedTargetOrdering extends Ordering[(IndelRealignmentTarget, Int)] {
/**
* Order two indel realignment targets by earlier starting position.
*
* @param a Indel realignment target to compare.
* @param b Indel realignment target to compare.
* @return Comparison done by starting position.
*/
def compare (a: (IndelRealignmentTarget, Int), b: (IndelRealignmentTarget, Int)) : Int = {
a._1.getReadRange.start compare b._1.getReadRange.start
}
}
object TargetOrdering extends Ordering[IndelRealignmentTarget] {
/**
* Order two indel realignment targets by earlier starting position.
*
* @param a Indel realignment target to compare.
* @param b Indel realignment target to compare.
* @return Comparison done by starting position.
*/
def compare (a: IndelRealignmentTarget, b: IndelRealignmentTarget) : Int = a.getReadRange.start compare b.getReadRange.start
/**
* Compares a read to an indel realignment target to see if it starts before the start of the indel realignment target.
*
* @param target Realignment target to compare.
* @param read Read to compare.
* @return True if start of read is before the start of the indel alignment target.
*/
def lt (target: IndelRealignmentTarget, read: ADAMRecord) : Boolean = target.getReadRange.start < read.getStart
/**
* Check to see if an indel realignment target and a read are mapped over the same length.
*
* @param target Realignment target to compare.
* @param read Read to compare.
* @return True if read alignment span is identical to the target span.
*/
def equals (target: IndelRealignmentTarget, read: ADAMRecord) : Boolean = {
(target.getReadRange.start == read.getStart) && (target.getReadRange.end == read.end.get)
}
/**
* Check to see if an indel realignment target contains the given read.
*
* @param target Realignment target to compare.
* @param read Read to compare.
* @return True if read alignment is contained in target span.
*/
def contains (target: IndelRealignmentTarget, read: ADAMRecord) : Boolean = {
(target.getReadRange.start <= read.getStart) && (target.getReadRange.end >= read.end.get - 1) // -1 since read end is non-inclusive
}
/**
* Compares two indel realignment targets to see if they overlap.
*
* @param a Indel realignment target to compare.
* @param b Indel realignment target to compare.
* @return True if two targets overlap.
*/
def overlap (a: IndelRealignmentTarget, b: IndelRealignmentTarget) : Boolean = {
// Note: the last two conditions were added for completeness; they should generally not
// be necessary although maybe in weird cases (indel on both reads in a mate pair that
// span a structural variant) and then one probably would not want to re-align these
// together.
// TODO: introduce an upper bound on re-align distance as GATK does??
((a.getReadRange.start >= b.getReadRange.start && a.getReadRange.start <= b.getReadRange.end) ||
     (a.getReadRange.end >= b.getReadRange.start && a.getReadRange.end <= b.getReadRange.end) ||
(a.getReadRange.start >= b.getReadRange.start && a.getReadRange.end <= b.getReadRange.end) ||
(b.getReadRange.start >= a.getReadRange.start && b.getReadRange.end <= a.getReadRange.end))
}
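  // Illustrative (invented numbers): for read ranges a = [10, 20] and b = [15, 30],
  // overlap(a, b) holds because a's end (20) falls inside b's range [15, 30].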
}
abstract class GenericRange(val readRange: NumericRange[Long]) {
def getReadRange (): NumericRange[Long] = readRange
def merge (r: GenericRange) : GenericRange
def compareRange (other : GenericRange) : Int
def compareReadRange (other : GenericRange) = {
if (readRange.start != other.getReadRange().start)
readRange.start.compareTo(other.getReadRange().start)
else
readRange.end.compareTo(other.getReadRange().end)
}
}
object IndelRange {
val emptyRange = IndelRange(
new NumericRange.Inclusive[Long](-1, -1, 1),
new NumericRange.Inclusive[Long](-1, -1, 1)
)
}
case class IndelRange (indelRange: NumericRange[Long], override val readRange: NumericRange[Long]) extends GenericRange(readRange) with Ordered[IndelRange] {
/**
* Merge two identical indel ranges.
*
* @param ir Indel range to merge in.
* @return Merged range.
*/
  override def merge (ir: GenericRange) : IndelRange = {
    if (this == IndelRange.emptyRange)
      return ir.asInstanceOf[IndelRange]
    assert(indelRange == ir.asInstanceOf[IndelRange].getIndelRange)
// do not need to check read range - read range must contain indel range, so if
// indel range is the same, read ranges will overlap
new IndelRange (indelRange,
new NumericRange.Inclusive[Long](
readRange.start min ir.readRange.start,
readRange.end max ir.readRange.end,
1)
)
}
def getIndelRange (): NumericRange[Long] = indelRange
override def compareRange (other: GenericRange) : Int =
if (indelRange.start != other.asInstanceOf[IndelRange].indelRange.start)
indelRange.start.compareTo(other.asInstanceOf[IndelRange].indelRange.start)
else
indelRange.end.compareTo(other.asInstanceOf[IndelRange].indelRange.end)
override def compare (other : IndelRange) : Int = {
val cmp = compareRange(other)
if (cmp != 0)
cmp
else
super.compareReadRange(other)
}
}
class IndelRangeSerializer extends Serializer[IndelRange] {
def write (kryo: Kryo, output: Output, obj: IndelRange) = {
output.writeLong(obj.getIndelRange().start)
output.writeLong(obj.getIndelRange().end)
output.writeLong(obj.getReadRange().start)
output.writeLong(obj.getReadRange().end)
}
def read (kryo: Kryo, input: Input, klazz: Class[IndelRange]) : IndelRange = {
val irStart = input.readLong()
val irEnd = input.readLong()
val rrStart = input.readLong()
val rrEnd = input.readLong()
new IndelRange(
new NumericRange.Inclusive[Long](irStart, irEnd, 1),
new NumericRange.Inclusive[Long](rrStart, rrEnd, 1)
)
}
}
object SNPRange {
val emptyRange = SNPRange(
-1L,
new NumericRange.Inclusive[Long](-1, -1, 1)
)
}
case class SNPRange (snpSite: Long, override val readRange: NumericRange[Long]) extends GenericRange(readRange) with Ordered[SNPRange] {
/**
* Merge two identical SNP sites.
*
* @param sr SNP range to merge in.
* @return Merged SNP range.
*/
  override def merge (sr: GenericRange) : SNPRange = {
    if (this == SNPRange.emptyRange)
      return sr.asInstanceOf[SNPRange]
    assert(snpSite == sr.asInstanceOf[SNPRange].getSNPSite)
// do not need to check read range - read range must contain snp site, so if
// snp site is the same, read ranges will overlap
new SNPRange(snpSite,
new NumericRange.Inclusive[Long](
readRange.start min sr.readRange.start,
readRange.end max sr.readRange.end,
1
)
)
}
def getSNPSite(): Long = snpSite
override def compare (other : SNPRange) : Int = {
val cmp = compareRange(other)
if (cmp != 0)
cmp
else
super.compareReadRange(other)
}
override def compareRange(other : GenericRange) : Int =
snpSite.compareTo(other.asInstanceOf[SNPRange].snpSite)
}
class SNPRangeSerializer extends Serializer[SNPRange] {
def write(kryo: Kryo, output: Output, obj: SNPRange) = {
output.writeLong(obj.getSNPSite())
output.writeLong(obj.getReadRange().start)
output.writeLong(obj.getReadRange().end)
}
def read(kryo: Kryo, input: Input, klazz: Class[SNPRange]): SNPRange = {
val SNPSite = input.readLong()
val rrStart = input.readLong()
val rrEnd = input.readLong()
new SNPRange(
SNPSite,
new NumericRange.Inclusive[Long](rrStart, rrEnd, 1)
)
}
}
object IndelRealignmentTarget {
// threshold for determining whether a pileup contains sufficient mismatch evidence
val mismatchThreshold = 0.15
/**
* Generates an indel realignment target from a pileup.
*
* @param rod Base pileup.
* @return Generated realignment target.
*/
def apply(rod: Seq[ADAMPileup]): IndelRealignmentTarget = {
/**
* If we have a indel in a pileup position, generates an indel range.
*
* @param pileup Single pileup position.
* @return Indel range.
*/
def mapEvent(pileup: ADAMPileup): IndelRange = {
Option(pileup.getReadBase) match {
case None => {
// deletion
new IndelRange(
new NumericRange.Inclusive[Long](
pileup.getPosition.toLong - pileup.getRangeOffset.toLong,
pileup.getPosition.toLong + pileup.getRangeLength.toLong - pileup.getRangeOffset.toLong - 1,
1),
new NumericRange.Inclusive[Long](pileup.getReadStart.toLong, pileup.getReadEnd.toLong - 1, 1)
)
}
case Some(o) => {
// insert
new IndelRange(
new NumericRange.Inclusive[Long](pileup.getPosition.toLong, pileup.getPosition.toLong, 1),
new NumericRange.Inclusive[Long](pileup.getReadStart.toLong, pileup.getReadEnd.toLong - 1, 1)
)
}
}
}
/**
* If we have a point event, generates a SNPRange.
*
* @param pileup Pileup position with mismatch evidence.
* @return SNP range.
*/
def mapPoint(pileup: ADAMPileup): SNPRange = {
val range : NumericRange.Inclusive[Long] =
new NumericRange.Inclusive[Long](pileup.getReadStart.toLong, pileup.getReadEnd.toLong - 1, 1)
new SNPRange(pileup.getPosition, range)
}
// segregate into indels, matches, and mismatches
val indels = extractIndels(rod)
val matches = extractMatches(rod)
val mismatches = extractMismatches(rod)
// TODO: this assumes Sanger encoding; how about older data? Should there be a property somewhere?
// calculate the quality of the matches and the mismatches
val matchQuality : Int =
if (matches.size > 0)
matches.map(_.getSangerQuality).reduce(_ + _)
else
0
val mismatchQuality : Int =
if (mismatches.size > 0)
mismatches.map(_.getSangerQuality).reduce(_ + _)
else
0
// check our mismatch ratio - if we have a sufficiently high ratio of mismatch quality, generate a snp event, else just generate indel events
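    // e.g. (illustrative numbers): matchQuality = 100, mismatchQuality = 20
    // gives 20.0 / 100.0 = 0.2 >= 0.15, so SNP ranges are emitted alongside indels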
if (matchQuality == 0 || mismatchQuality.toDouble / matchQuality.toDouble >= mismatchThreshold) {
new IndelRealignmentTarget(
new HashSet[IndelRange]().union(indels.map(mapEvent).toSet),
new HashSet[SNPRange]().union(mismatches.map(mapPoint).toSet)
)
} else {
new IndelRealignmentTarget(
new HashSet[IndelRange]().union(indels.map(mapEvent).toSet), HashSet[SNPRange]()
)
}
}
def extractMismatches(rod: Seq[ADAMPileup]) : Seq[ADAMPileup] = {
rod.filter(r => r.getRangeOffset == null && r.getNumSoftClipped == 0)
.filter(r => r.getReadBase != r.getReferenceBase)
}
def extractMatches(rod: Seq[ADAMPileup]) : Seq[ADAMPileup] =
rod.filter(r => r.getRangeOffset == null && r.getNumSoftClipped == 0)
.filter(r => r.getReadBase == r.getReferenceBase)
def extractIndels(rod: Seq[ADAMPileup]) : Seq[ADAMPileup] =
rod.filter(_.getRangeOffset != null)
/**
* @return An empty target that has no indel nor SNP evidence.
*/
def emptyTarget(): IndelRealignmentTarget = {
new IndelRealignmentTarget(new HashSet[IndelRange](), new HashSet[SNPRange]())
}
}
class RangeAccumulator[T <: GenericRange] (val data : List[T], val previous : T) {
def accumulate (current: T) : RangeAccumulator[T] = {
if (previous == null)
new RangeAccumulator[T](data, current)
else
if (previous.compareRange(current) == 0)
new RangeAccumulator[T](data, previous.merge(current).asInstanceOf[T])
else
new RangeAccumulator[T](previous :: data, current)
}
}
class IndelRealignmentTarget(val indelSet: Set[IndelRange], val snpSet: Set[SNPRange]) extends Logging {
initLogging()
// the maximum range covered by either snps or indels
def readRange : NumericRange.Inclusive[Long] = {
(
indelSet.toList.map(_.getReadRange.asInstanceOf[NumericRange.Inclusive[Long]]) ++
snpSet.toList.map(_.getReadRange.asInstanceOf[NumericRange.Inclusive[Long]])
).reduce(
(a: NumericRange.Inclusive[Long], b: NumericRange.Inclusive[Long]) =>
new NumericRange.Inclusive[Long]((a.start min b.start), (a.end max b.end), 1)
)
}
/**
* Merges two indel realignment targets.
*
* @param target Target to merge in.
* @return Merged target.
*/
def merge(target: IndelRealignmentTarget): IndelRealignmentTarget = {
// TODO: this is unnecessarily wasteful; if the sets themselves
// were sorted (requires refactoring) we could achieve the same
// in a single merge (as in mergesort) operation. This should
// be done once correctness has been established
val currentIndelSet = indelSet.union(target.getIndelSet()).toArray
quickSort(currentIndelSet)
val accumulator : RangeAccumulator[IndelRange] = new RangeAccumulator[IndelRange](List(), null)
val newIndelSetAccumulated : RangeAccumulator[IndelRange] = currentIndelSet.foldLeft(accumulator) {
(acc, elem) => acc.accumulate(elem)
}
if (newIndelSetAccumulated.previous == null) // without the if we end up with a singleton set with null as element
new IndelRealignmentTarget(newIndelSetAccumulated.data.toSet, snpSet ++ target.getSNPSet)
else
new IndelRealignmentTarget(newIndelSetAccumulated.data.toSet + newIndelSetAccumulated.previous, snpSet ++ target.getSNPSet)
}
def isEmpty(): Boolean = {
indelSet.isEmpty && snpSet.isEmpty
}
def getReadRange(): NumericRange[Long] = {
if ( (snpSet != null || indelSet != null)
&& (readRange == null))
log.warn("snpSet or indelSet non-empty but readRange empty!")
readRange
}
def getSortKey(): Long = {
if (readRange != null)
readRange.start
else if( ! getIndelSet().isEmpty && getSNPSet().isEmpty)
getIndelSet().head.getReadRange().start
else if(getIndelSet().isEmpty && ! getSNPSet().isEmpty)
getSNPSet().head.getReadRange().start
else {
log.error("unknown sort key for IndelRealignmentTarget")
-1.toLong
}
}
protected[realignmenttarget] def getSNPSet(): Set[SNPRange] = snpSet
protected[realignmenttarget] def getIndelSet(): Set[IndelRange] = indelSet
}
class TreeSetSerializer extends Serializer[TreeSet[IndelRealignmentTarget]] {
def write (kryo: Kryo, output: Output, obj: TreeSet[IndelRealignmentTarget]) = {
kryo.writeClassAndObject(output, obj.toList)
}
def read (kryo: Kryo, input: Input, klazz: Class[TreeSet[IndelRealignmentTarget]]) : TreeSet[IndelRealignmentTarget] = {
new TreeSet()(TargetOrdering).union(kryo.readClassAndObject(input).asInstanceOf[List[IndelRealignmentTarget]].toSet)
}
}
| fnothaft/adam | adam-core/src/main/scala/edu/berkeley/cs/amplab/adam/algorithms/realignmenttarget/IndelRealignmentTarget.scala | Scala | apache-2.0 | 15,891 |
object MapTest{
def main(args: Array[String]){
//构造不可变Map
val scores = Map("Alice" -> 10, "Bob" -> 3, "Cindy" -> 8)
println("Bob -> "+ scores("Bob"))
//scores("Bob") = 20 不可修改
//构造可变Map
val scores1 = scala.collection.mutable.Map("Alice" -> 10, "Bob" -> 3, "Cindy" -> 8);
println("Alice -> " + scores1("Alice"))
scores1("Alice") = 30
println("修改后的 Alice -> "+ scores1("Alice"))
//构造空映射
var scores2:Map[String, Int] = Map()
scores2 += ("Cindy" -> 8, "Fred" -> 7)
println("Cindy -> "+ scores2("Cindy"))
//scores2("Cindy") = 80 //只可添加不可修改
//getOrElse,如果存在则打印对应的值, 如果不存在打印0
println(scores1.getOrElse("Bob", 0))
println(scores1.getOrElse("Alex", 0))
for(k <- scores2.keys) println("keys: " + k)
for(v <- scores2.values) println("values: " + v)
for((k,v) <- scores2) printf("key: %s, value: %d\n",k, v)
//排序
val scores3 = scala.collection.immutable.SortedMap("Alice" -> 10, "Fred" -> 7, "Bob" -> 3, "Cindy" -> 8)
println(scores3)
}
}
| PengLiangWang/Scala | Map/MapTest.scala | Scala | gpl-3.0 | 1,296 |
package models.generator.csv
import java.io.StringWriter
import io.apibuilder.generator.v0.models.{File, InvocationForm}
import io.apibuilder.spec.v0.models.{Operation, Resource, Service}
import lib.generator.CodeGenerator
import org.apache.commons.csv.{CSVFormat, CSVPrinter}
class CsvGenerator extends CodeGenerator {
final val EMPTY = ""
override def invoke(form: InvocationForm): Either[Seq[String], Seq[File]] = {
Right(generateSourceFiles(form.service))
}
def generateSourceFiles(service: Service): Seq[File] = {
Seq(generateResourcesFile(service.resources))
}
def generateResourcesFile(resources: Seq[Resource]): File = {
def makeRow(resource: Resource, operation: Operation): Seq[String] = {
val method = operation.method.toString
val path = resource.path.getOrElse(EMPTY) + operation.path
val description = operation.description.getOrElse(EMPTY)
Seq(method, path, description)
}
val rows: Seq[Seq[String]] = for {
resource <- resources
operation <- resource.operations
} yield makeRow(resource, operation)
generateSourceFile("resources.csv", Seq("method", "path", "description"), rows)
}
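  // Illustrative output (hypothetical service with two operations on /users):
  //   method,path,description
  //   GET,/users,Lists all users
  //   POST,/users,Creates a user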
def generateSourceFile(fileName: String, header: Seq[String], rows: Seq[Seq[String]]): File = {
val stringWriter = new StringWriter
val csvPrinter = new CSVPrinter(stringWriter, CSVFormat.DEFAULT)
    assert(rows.forall { row =>
      row.length == header.length
    }, "All lines must have same number of fields as header line")
csvPrinter.printRecords(header.toArray)
    rows.foreach(row => csvPrinter.printRecords(row.toArray))
csvPrinter.flush()
File(
name = fileName,
contents = stringWriter.toString
)
}
}
object CsvGenerator extends CsvGenerator | mbryzek/apidoc-generator | csv-generator/src/main/scala/models/generator/csv/CsvGenerator.scala | Scala | mit | 1,793 |
package avrohugger
package format
package specific
package matchers
import treehugger.forest._
import definitions._
import treehuggerDSL._
import org.apache.avro.Schema
import org.codehaus.jackson.JsonNode
import org.codehaus.jackson.map.ObjectMapper
import org.codehaus.jackson.node.{NullNode, ObjectNode, TextNode}
import scala.collection.JavaConversions._
object DefaultValueMatcher {
// This code was stolen from here:
// https://github.com/julianpeeters/avro-scala-macro-annotations/blob/104fa325a00044ff6d31184fa7ff7b6852e9acd5/macros/src/main/scala/avro/scala/macro/annotations/provider/matchers/FromJsonMatcher.scala
def getDefaultValue(field: Schema.Field): Tree = {
val nullNode = new TextNode("null")
def fromJsonNode(node: JsonNode, schema: Schema): Tree = {
schema.getType match {
case _ if node == null => EmptyTree //not `default=null`, but no default
case Schema.Type.INT => LIT(node.getIntValue)
case Schema.Type.FLOAT => LIT(node.getDoubleValue.asInstanceOf[Float])
case Schema.Type.LONG => LIT(node.getLongValue)
case Schema.Type.DOUBLE => LIT(node.getDoubleValue)
case Schema.Type.BOOLEAN => LIT(node.getBooleanValue)
case Schema.Type.STRING => LIT(node.getTextValue)
case Schema.Type.ENUM => (REF(schema.getName) DOT node.getTextValue)
case Schema.Type.NULL => LIT(null)
case Schema.Type.UNION => {
val unionSchemas = schema.getTypes.toList
if (unionSchemas.length == 2 &&
unionSchemas.exists(schema => schema.getType == Schema.Type.NULL) &&
unionSchemas.exists(schema => schema.getType != Schema.Type.NULL)) {
val maybeSchema = unionSchemas.find(schema => schema.getType != Schema.Type.NULL)
maybeSchema match {
case Some(unionSchema) => {
node match {
case `nullNode` => NONE
case nn: NullNode => NONE
case nonNullNode => SOME(fromJsonNode(nonNullNode, unionSchema))
}
}
case None => throw new Exception("no avro type found in this union")
}
}
else throw new Exception("Unsupported union field")
}
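        // e.g. (illustrative): a field of type ["null", "int"] with default null yields NONE,
        // and ["int", "null"] with default 3 yields SOME(LIT(3)); any union that is not a
        // nullable pair is rejected above.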
case Schema.Type.ARRAY => {
LIST(node.getElements.toList.map(e => fromJsonNode(e, schema.getElementType)))
}
case Schema.Type.MAP => {
val kvps = node.getFields.toList.map(e => LIT(e.getKey) ANY_-> fromJsonNode(e.getValue, schema.getValueType))
MAKE_MAP(kvps)
}
case Schema.Type.RECORD => {
val fields = schema.getFields
val jsObject = node match {
case t: TextNode =>
val mapper = new ObjectMapper();
mapper.readValue(t.getTextValue, classOf[ObjectNode])
case o: ObjectNode => o
case _ => throw new Exception(s"Invalid default value for field: $field, value: $node")
}
val fieldValues = fields.map { f =>
fromJsonNode(jsObject.get(f.name), f.schema)
}
NEW(schema.getName, fieldValues: _*)
}
case x => throw new Exception("Can't extract a default field, type not yet supported: " + x)
}
}
fromJsonNode(field.defaultValue, field.schema)
}
} | ppearcy/avrohugger | avrohugger-core/src/main/scala/format/specific/matchers/DefaultValueMatcher.scala | Scala | apache-2.0 | 3,347 |
package mesosphere.marathon
package state
import com.wix.accord._
import com.wix.accord.dsl._
import mesosphere.marathon.api.v2.Validation._
import mesosphere.marathon.core.externalvolume.ExternalVolumes
import mesosphere.marathon.core.pod.PodDefinition
import org.jgrapht.DirectedGraph
import org.jgrapht.alg.CycleDetector
import org.jgrapht.graph._
import scala.annotation.tailrec
import scala.collection.JavaConverters._
/**
* Represents the root group for Marathon. It is a persistent data structure,
* and all of the modifying operations are defined at this level.
*/
class RootGroup(
apps: Map[AppDefinition.AppKey, AppDefinition] = Group.defaultApps,
pods: Map[PathId, PodDefinition] = Group.defaultPods,
groupsById: Map[Group.GroupKey, Group] = Group.defaultGroups,
dependencies: Set[PathId] = Group.defaultDependencies,
version: Timestamp = Group.defaultVersion) extends Group(
PathId.empty,
apps,
pods,
groupsById,
dependencies,
version) {
require(
groupsById.forall {
case (_, _: RootGroup) => false
case (_, _: Group) => true
},
"`RootGroup` cannot be a child of `RootGroup`.")
lazy val applicationDependencies: List[(AppDefinition, AppDefinition)] = {
var result = List.empty[(AppDefinition, AppDefinition)]
//group->group dependencies
for {
group <- transitiveGroupsById.values
dependencyId <- group.dependencies
dependency <- transitiveGroupsById.get(dependencyId)
app <- group.transitiveApps
dependentApp <- dependency.transitiveApps
} result ::= app -> dependentApp
//app->group/app dependencies
for {
group <- transitiveGroupsById.values.filter(_.apps.nonEmpty)
app <- group.apps.values
dependencyId <- app.dependencies
dependentApp = this.app(dependencyId).map(Set(_))
dependentGroup = transitiveGroupsById.get(dependencyId).map(_.transitiveApps)
dependent <- dependentApp orElse dependentGroup getOrElse Set.empty
} result ::= app -> dependent
result
}
/**
* This is used to compute the "layered" topological sort during deployment.
* @return The dependency graph of all the run specs in the root group.
*/
lazy val dependencyGraph: DirectedGraph[RunSpec, DefaultEdge] = {
val graph = new DefaultDirectedGraph[RunSpec, DefaultEdge](classOf[DefaultEdge])
for (runnableSpec <- transitiveRunSpecs) graph.addVertex(runnableSpec)
for ((app, dependent) <- applicationDependencies) graph.addEdge(app, dependent)
new UnmodifiableDirectedGraph(graph)
}
private[state] def runSpecsWithNoDependencies: Set[RunSpec] = {
val g = dependencyGraph
g.vertexSet.asScala.filter { v => g.outDegreeOf(v) == 0 }.toSet
}
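  // Illustrative (invented ids): with /web depending on /db and /db depending on /zk,
  // the graph holds edges web -> db and db -> zk, so runSpecsWithNoDependencies
  // yields Set(zk) - the layer that can be deployed first.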
private[state] def hasNonCyclicDependencies: Boolean = {
!new CycleDetector[RunSpec, DefaultEdge](dependencyGraph).detectCycles()
}
/**
* Add a new `Group` to the root group.
*
* If `newGroup.id` already exists in the root group, it will be replaced with `newGroup`.
* This behavior is "add-or-replace", similar to `map + (key -> value)`.
*
   * Otherwise, if any intermediate group along the path does not exist, an empty `Group` is created.
* For example, if a group with id `/foo/bar/baz` is being added to a root group with no children,
* intermediate groups `/foo` and `/foo/bar` will be created in an empty state.
* This is similar to the behavior of `mkdir -p`.
*
* Every transitive group gets the new version.
*
* @param newGroup the new group to be added
* @param version the new version of the root group
* @return the new root group with `newGroup` added.
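   *
   * A hypothetical example (ids invented; assumes Marathon's `PathId` string factory):
   * {{{
   * // adding /foo/bar/baz to an empty root also creates the empty groups /foo and /foo/bar
   * val root = RootGroup.empty.putGroup(Group.empty(PathId("/foo/bar/baz")))
   * }}}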
*/
def putGroup(newGroup: Group, version: Timestamp = Group.defaultVersion): RootGroup = {
@tailrec def rebuildTree(allParents: List[PathId], result: Group): Group = {
allParents match {
case Nil => result
case head :: tail =>
val oldParent = group(head).getOrElse(Group.empty(head))
val newParent = Group(
id = oldParent.id,
apps = oldParent.apps,
pods = oldParent.pods,
groupsById = oldParent.groupsById + (result.id -> result),
dependencies = oldParent.dependencies,
version = version)
rebuildTree(tail, newParent)
}
}
RootGroup.fromGroup(updateVersion(rebuildTree(newGroup.id.allParents, newGroup), version))
}
/**
* Make a group with the specified id exist in the root group.
*
* If `groupId` already exists in the root group, do nothing.
   * Otherwise, if any intermediate group along the path does not exist, an empty `Group` is created.
* This is similar to the behavior of `mkdir -p`.
*
* The root group's version is preserved.
*
* @param groupId the id of the group to make exist
* @return the new root group with group with id `groupId`.
*/
def makeGroup(groupId: PathId): RootGroup =
group(groupId).fold(putGroup(Group.empty(groupId), version))(_ => this)
/**
* Apply an update function to every transitive app rooted at a specified group id.
*
   * If `groupId` or any intermediate group along the path does not exist, an empty `Group` is created.
* This is similar to the behavior of `mkdir -p`.
*
* Every transitive group gets the new version.
*
* @param groupId the root of the group subtree to update
* @param app the update function, which is applied to every transitive app under `groupId`.
* @param version the new version of the root group
* @return the new root group with group with id `groupId`.
*/
def updateTransitiveApps(
groupId: PathId, app: AppDefinition => AppDefinition, version: Timestamp = Group.defaultVersion): RootGroup = {
def updateApps(group: Group): Group = {
Group(
id = group.id,
apps = group.apps.map { case (appId, appDef) => appId -> app(appDef) },
pods = group.pods,
groupsById = group.groupsById.map { case (subGroupId, subGroup) => subGroupId -> updateApps(subGroup) },
dependencies = group.dependencies,
version = version)
}
val oldGroup = group(groupId).getOrElse(Group.empty(groupId))
val newGroup = updateApps(oldGroup)
putGroup(newGroup, version)
}
/**
* Update the apps of the group with the specified group id by applying the update function.
*
   * If `groupId` or any intermediate group along the path does not exist, an empty `Group` is created.
* This is similar to the behavior of `mkdir -p`.
*
* Every transitive group gets the new version.
*
* @param groupId the id of the group to be updated
* @param apps the update function, which is applied to the specified group's apps.
* @param version the new version of the root group
* @return the new root group with the specified group updated.
*/
def updateApps(
groupId: PathId,
apps: Map[AppDefinition.AppKey, AppDefinition] => Map[AppDefinition.AppKey, AppDefinition],
version: Timestamp = Group.defaultVersion): RootGroup = {
val oldGroup = group(groupId).getOrElse(Group.empty(groupId))
val oldApps = oldGroup.apps
val newApps = apps(oldApps)
val newGroup = Group(
id = oldGroup.id,
apps = newApps,
pods = oldGroup.pods,
groupsById = oldGroup.groupsById,
dependencies = oldGroup.dependencies,
version = version)
putGroup(newGroup, version)
}
/**
* Update the dependencies of the group with the specified group id by applying the update function.
*
   * If `groupId` or any intermediate group along the path does not exist, an empty `Group` is created.
* This is similar to the behavior of `mkdir -p`.
*
* Every transitive group gets the new version.
*
* @param groupId the id of the group to be updated
* @param dependencies the update function, which is applied to the specified group's dependencies.
* @param version the new version of the root group
* @return the new root group with the specified group updated.
*/
def updateDependencies(
groupId: PathId, dependencies: Set[PathId] => Set[PathId], version: Timestamp = Group.defaultVersion): RootGroup = {
val oldGroup = group(groupId).getOrElse(Group.empty(groupId))
val oldDependencies = oldGroup.dependencies
val newDependencies = dependencies(oldDependencies)
val newGroup = Group(
id = oldGroup.id,
apps = oldGroup.apps,
pods = oldGroup.pods,
groupsById = oldGroup.groupsById,
dependencies = newDependencies,
version = version
)
putGroup(newGroup, version)
}
/**
* Update the app with the specified app id by applying the update function.
*
   * `fn` is invoked with `Some(app)` if an app with `appId` exists, otherwise with `None`.
*
   * If `appId.parent` or any intermediate group along the path does not exist, an empty `Group` is created.
* This is similar to the behavior of `mkdir -p`.
*
* Every transitive group gets the new version.
*
* @param appId the id of the app to be updated
* @param fn the update function, which is applied to the specified app
* @param version the new version of the root group
* @return the new root group with the specified app updated.
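   *
   * For illustration (hypothetical id; assumes `AppDefinition`'s remaining fields have defaults),
   * an upsert can be written as:
   * {{{
   * root.updateApp(appId, _.getOrElse(AppDefinition(appId)))
   * }}}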
*/
def updateApp(
appId: PathId, fn: Option[AppDefinition] => AppDefinition, version: Timestamp = Group.defaultVersion): RootGroup = {
val oldGroup = group(appId.parent).getOrElse(Group.empty(appId.parent))
val newApp = fn(app(appId))
require(newApp.id == appId, "app id must not be changed by `fn`.")
val newGroup = Group(
id = oldGroup.id,
// replace potentially existing app definition
apps = oldGroup.apps + (newApp.id -> newApp),
pods = oldGroup.pods,
// If there is a group with a conflicting id which contains no app or pod definitions,
// replace it. Otherwise do not replace it. Validation should catch conflicting app/pod/group IDs later.
groupsById = oldGroup.groupsById.filter {
case (_, group) => group.id != newApp.id || group.containsApps || group.containsPods
},
dependencies = oldGroup.dependencies,
version = version)
putGroup(newGroup, version)
}
/**
* Update the pod with the specified pod id by applying the update function.
*
   * `fn` is invoked with `Some(pod)` if a pod with `podId` exists, otherwise with `None`.
*
   * If `podId.parent` or any intermediate group along the path does not exist, an empty `Group` is created.
* This is similar to the behavior of `mkdir -p`.
*
* Every transitive group gets the new version.
*
* @param podId the id of the pod to be updated
* @param fn the update function, which is applied to the specified pod
* @param version the new version of the root group
* @return the new root group with the specified pod updated.
*/
def updatePod(
podId: PathId, fn: Option[PodDefinition] => PodDefinition, version: Timestamp = Group.defaultVersion): RootGroup = {
val oldGroup = group(podId.parent).getOrElse(Group.empty(podId.parent))
val newPod = fn(pod(podId))
require(newPod.id == podId, "pod id must not be changed by `fn`.")
val newGroup = Group(
id = oldGroup.id,
apps = oldGroup.apps,
// replace potentially existing pod definition
pods = oldGroup.pods + (newPod.id -> newPod),
// If there is a group with a conflicting id which contains no app or pod definitions,
// replace it. Otherwise do not replace it. Validation should catch conflicting app/pod/group IDs later.
groupsById = oldGroup.groupsById.filter {
case (_, group) => group.id != newPod.id || group.containsApps || group.containsPods
},
dependencies = oldGroup.dependencies,
version = version)
putGroup(newGroup, version)
}
private def updateVersion(group: Group, version: Timestamp): Group = {
Group(
id = group.id,
apps = group.apps,
pods = group.pods,
groupsById = group.groupsById.map { case (subGroupId, subGroup) => subGroupId -> updateVersion(subGroup, version) },
dependencies = group.dependencies,
version = version)
}
/**
* Returns a new `RootGroup` where all transitive groups have their `version` set to the specified timestamp.
*
* @param version the new version of the root group.
*/
def updateVersion(version: Timestamp = Group.defaultVersion): RootGroup = {
RootGroup.fromGroup(updateVersion(this, version))
}
/**
* Remove the group with the specified group id.
*
* Every transitive group gets the new version.
*
* @param groupId the id of the group to be removed
* @param version the new version of the root group
* @return the new root group with the specified group removed.
*/
def removeGroup(groupId: PathId, version: Timestamp = Group.defaultVersion): RootGroup = {
require(!groupId.isRoot, "The root group cannot be removed.")
group(groupId).fold(updateVersion(version)) { oldGroup =>
val oldParent = transitiveGroupsById(oldGroup.id.parent)
putGroup(
Group(
id = oldParent.id,
apps = oldParent.apps,
pods = oldParent.pods,
groupsById = oldParent.groupsById - oldGroup.id,
dependencies = oldParent.dependencies,
version = version), version)
}
}
/**
* Remove the app with the specified app id.
*
* Every transitive group gets the new version.
*
* @param appId the id of the app to be removed
* @param version the new version of the root group
* @return the new root group with the specified app removed.
*/
def removeApp(appId: PathId, version: Timestamp = Group.defaultVersion): RootGroup = {
app(appId).fold(updateVersion(version)) { oldApp =>
val oldGroup = transitiveGroupsById(oldApp.id.parent)
putGroup(Group(
id = oldGroup.id,
apps = oldGroup.apps - oldApp.id,
pods = oldGroup.pods,
groupsById = oldGroup.groupsById,
dependencies = oldGroup.dependencies,
version = version), version)
}
}
/**
* Remove the pod with the specified pod id.
*
* Every transitive group gets the new version.
*
* @param podId the id of the pod to be removed
* @param version the new version of the root group
* @return the new root group with the specified pod removed.
*/
def removePod(podId: PathId, version: Timestamp = Group.defaultVersion): RootGroup = {
pod(podId).fold(updateVersion(version)) { oldPod =>
val oldGroup = transitiveGroupsById(oldPod.id.parent)
putGroup(Group(
id = oldGroup.id,
apps = oldGroup.apps,
pods = oldGroup.pods - oldPod.id,
groupsById = oldGroup.groupsById,
dependencies = oldGroup.dependencies,
version = version), version)
}
}
/**
* Returns a new `RootGroup` where all transitive groups, apps, and pods have their `version` set to `Timestamp(0)`.
*/
@SuppressWarnings(Array("PartialFunctionInsteadOfMatch"))
def withNormalizedVersions: RootGroup = {
def in(group: Group): Group = {
Group(
id = group.id,
apps = group.apps.map { case (appId, app) => appId -> app.copy(versionInfo = VersionInfo.NoVersion) },
pods = group.pods.map { case (podId, pod) => podId -> pod.copy(versionInfo = VersionInfo.NoVersion) },
groupsById = group.groupsById.map { case (subGroupId, subGroup) => subGroupId -> in(subGroup) },
dependencies = group.dependencies,
version = Timestamp(0))
}
RootGroup.fromGroup(in(this))
}
}
object RootGroup {
def apply(
apps: Map[AppDefinition.AppKey, AppDefinition] = Group.defaultApps,
pods: Map[PathId, PodDefinition] = Group.defaultPods,
groupsById: Map[Group.GroupKey, Group] = Group.defaultGroups,
dependencies: Set[PathId] = Group.defaultDependencies,
version: Timestamp = Group.defaultVersion): RootGroup = new RootGroup(apps, pods, groupsById, dependencies, version)
def empty: RootGroup = RootGroup(version = Timestamp(0))
def fromGroup(group: Group): RootGroup = {
require(group.id.isRoot)
RootGroup(group.apps, group.pods, group.groupsById, group.dependencies, group.version)
}
def rootGroupValidator(enabledFeatures: Set[String]): Validator[RootGroup] = {
noCyclicDependencies and
Group.validGroup(PathId.empty, enabledFeatures) and
ExternalVolumes.validRootGroup()
}
private def noCyclicDependencies: Validator[RootGroup] =
isTrue("Dependency graph has cyclic dependencies.") { _.hasNonCyclicDependencies }
}
| janisz/marathon | src/main/scala/mesosphere/marathon/state/RootGroup.scala | Scala | apache-2.0 | 16,771 |
package filodb.kafka
import java.lang.{Long => JLong}
import scala.collection.JavaConverters._
import scala.concurrent.Future
import com.typesafe.config.Config
import com.typesafe.scalalogging.StrictLogging
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerRecord}
import org.apache.kafka.clients.producer.internals.DefaultPartitioner
import org.apache.kafka.common.serialization.{ByteArraySerializer, LongSerializer}
import filodb.core.{Response, Success}
import filodb.core.downsample.DownsamplePublisher
class KafkaDownsamplePublisher(downsampleConfig: Config) extends DownsamplePublisher with StrictLogging {
private val kafkaConfig = propsFromConfig(downsampleConfig.getConfig("publisher-config.kafka"))
private val topics: Map[Int, String] = downsampleConfig.getConfig("publisher-config.topics")
.entrySet().asScala.map { e => e.getKey.toInt -> e.getValue.unwrapped().toString }.toMap
private var producer: KafkaProducer[JLong, Array[Byte]] = _
override def publish(shardNum: Int, resolution: Int, records: Seq[Array[Byte]]): Future[Response] = {
topics.get(resolution) match {
case Some(topic) =>
records.foreach { bytes =>
val rec = new ProducerRecord[JLong, Array[Byte]](topic, shardNum, shardNum.toLong: JLong,
bytes)
producer.send(rec)
}
Future.successful(Success)
case None =>
Future.failed(new IllegalArgumentException(s"Unregistered resolution $resolution"))
}
}
def propsFromConfig(config: Config): Map[String, Object] = {
val map = config.entrySet().asScala.map({ entry =>
entry.getKey -> entry.getValue.unwrapped()
}).toMap
    // fixed properties that override any user-supplied values
map ++ Map( "value.serializer" -> classOf[ByteArraySerializer],
"key.serializer" -> classOf[LongSerializer],
"partitioner.class" -> classOf[DefaultPartitioner])
}
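  // Illustrative (hypothetical config): a publisher-config.kafka block such as
  //   kafka { "bootstrap.servers" = "localhost:9092", "acks" = "all" }
  // becomes the producer properties, with the serializers and partitioner above
  // always taking precedence over user-supplied values.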
override def start(): Unit = {
logger.info(s"Starting Kafka Downsampling Publisher. Will be publishing to $topics with config: $kafkaConfig")
producer = new KafkaProducer(kafkaConfig.asJava)
}
override def stop(): Unit = {
logger.info("Stopping Kafka Downsampling Publisher")
producer.close()
}
}
| tuplejump/FiloDB | kafka/src/main/scala/filodb/kafka/KafkaDownsamplePublisher.scala | Scala | apache-2.0 | 2,252 |
/*
MET-API
Copyright (C) 2014 met.no
Contact information:
Norwegian Meteorological Institute
Box 43 Blindern
0313 OSLO
NORWAY
E-mail: [email protected]
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
MA 02110-1301, USA
*/
package services.observations
import play.api.libs.json.JsValue
case class ElementInfo(json: JsValue)
/**
* Interface for getting info from the elements/ endpoint
*/
abstract class ElementInfoGetter {
/**
* gets the current info for a set of elements
*/
def getInfoMap(auth: Option[String], requestHost: String, elementIds: Set[String]): Map[String, ElementInfo]
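  // Hypothetical call (argument values invented): an implementation backed by the
  // elements/ endpoint might be used as
  //   getter.getInfoMap(None, "data.met.no", Set("air_temperature"))
  // returning one ElementInfo (raw JSON) per requested element id.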
}
| metno/metapi-observations | app/services/observations/ElementInfoGetter.scala | Scala | gpl-2.0 | 1,294 |
/*
* Copyright Camunda Services GmbH and/or licensed to Camunda Services GmbH
* under one or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information regarding copyright
* ownership. Camunda licenses this file to you under the Apache License,
* Version 2.0; you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.camunda.feel.impl.script
import javax.script.{CompiledScript, ScriptContext, ScriptEngine}
import org.camunda.feel.syntaxtree.ParsedExpression
case class CompiledFeelScript(engine: FeelScriptEngine,
val expression: ParsedExpression)
extends CompiledScript {
def getEngine: ScriptEngine = engine
def eval(context: ScriptContext): Object = engine.eval(this, context)
}
| camunda/feel-scala | src/main/scala/org/camunda/feel/impl/script/CompiledFeelScript.scala | Scala | apache-2.0 | 1,230 |
package dbpedia.dataparsers.util.wikiparser.impl.simple
import SimpleWikiParser._
import dbpedia.dataparsers.util.{Language, UriUtils, WikiUtil}
import dbpedia.dataparsers.util.wikiparser.impl.wikipedia.{Disambiguation, Redirect}
import dbpedia.dataparsers.util.wikiparser._
import java.net.{URI, URISyntaxException}
import java.util.logging.{Level, Logger}
import dbpedia.dataparsers.util.RichString.wrapString
object SimpleWikiParser
{
private val logger = Logger.getLogger(classOf[SimpleWikiParser].getName)
private val MaxNestingLevel = 10
private val MaxErrors = 1000
private val commentEnd = new Matcher(List("-->"));
private val htmlTagEndOrStart = new Matcher(List("/>", "<"), false);
private val refEnd = new Matcher(List("</ref>"));
private val mathEnd = new Matcher(List("</math>"));
private val codeEnd = new Matcher(List("</code>"));
private val sourceEnd = new Matcher(List("</source>"));
private val internalLinkLabelOrEnd = new Matcher(List("|", "]]", "\n"));
private val internalLinkEnd = new Matcher(List("]]", "\n"), true);
private val externalLinkLabelOrEnd = new Matcher(List(" ", "]", "\n"));
private val externalLinkEnd = new Matcher(List("]", "\n"), true);
private val linkEnd = new Matcher(List(" ", "{","}", "[", "]", "\n", "\t"));
// '|=' is not valid wiki markup but safe to include, see http://sourceforge.net/tracker/?func=detail&atid=935521&aid=3572779&group_id=190976
private val propertyValueOrEnd = new Matcher(List("|=","=", "|", "}}"), true);
private val propertyEnd = new Matcher(List("|", "}}"), true);
private val templateParameterEnd = new Matcher(List("|", "}}}"), true);
private val propertyEndOrParserFunctionNameEnd = new Matcher(List("|", "}}", ":"), true);
private val parserFunctionEnd = new Matcher(List("}}"), true);
private val tableRowEnd1 = new Matcher(List("|}", "|+", "|-", "|", "!"));
private val tableRowEnd2 = new Matcher(List("|}", "|-", "|", "!"));
private val tableCellEnd1 = new Matcher(List("\n ", "\n|}", "\n|-", "\n|", "\n!", "||", "!!", "|", "!"), true);
private val tableCellEnd2 = new Matcher(List("|}", "|-", "|", "!"));
private val tableCellEnd3 = new Matcher(List("\n ", "\n|}", "\n|-", "\n|", "\n!", "||", "!!"), true);
private val sectionEnd = new Matcher(List("=\n", "=\r", "\n"), true);
}
/**
* Port of the DBpedia WikiParser from PHP.
*/
//TODO section names should only contain the contents of the TextNodes
class SimpleWikiParser extends WikiParser
{
/**
* Parses WikiText source and builds an Abstract Syntax Tree.
*
* @param page The page to be parsed.
* @return The PageNode which represents the root of the AST
     * @throws WikiParserException if an error occurred during parsing
*/
def apply(page : WikiPage) : Option[PageNode] =
{
if (page.format != null && page.format.nonEmpty && page.format != "text/x-wiki")
{
return None
}
else
{
//Parse source
val nodes = parseUntil(new Matcher(List(), true), new Source(page.source, page.title.language), 0)
//Check if this page is a Redirect
// TODO: the regex used in org..extraction.mappings.Redirects.scala is probably a bit better
val redirectRegex = """(?is)\s*(?:""" + Redirect(page.title.language).mkString("|") + """)\s*:?\s*\[\[.*"""
// TODO: also extract the redirect target.
// TODO: compare extracted redirect target to the one found by Wikipedia (stored in the WikiPage object).
// Problems:
// - if the WikiPage object was not read from XML dump or api.php, redirect may not be set in WikiPage
// - generating the XML dump files takes several days, and the wikitext is obviously not generated at the
// same time as the redirect target, so sometimes they do not match.
// In a nutshell: if the redirect in WikiPage is different from what we find, we're probably correct.
val isRedirect = page.source.matches(redirectRegex)
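      // Illustrative: with the English redirect keywords this matches wikitext that
      // begins (after optional whitespace) with a redirect keyword, an optional colon,
      // and a "[[Target]]" link.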
//Check if this page is a Disambiguation
//TODO resolve template titles
val disambiguationNames = Disambiguation.get(page.title.language).getOrElse(Set("Disambig"))
val isDisambiguation = nodes.exists(node => findTemplate(node, disambiguationNames, page.title.language))
//Return page node
Some(new PageNode(page.title, page.id, page.revision, page.timestamp, page.contributorID, page.contributorName, isRedirect, isDisambiguation, nodes))
}
}
def parseString(string:String): List[Node] = {
parseUntil(new Matcher(List(), true), new Source(string, Language.English), 0)
//parseProperty(new Source(string, Language.English), "unknown", 0)
}
private def findTemplate(node : Node, names : Set[String], language : Language) : Boolean = node match
{
case TemplateNode(title, _, _, _) => names.contains(title.decoded)
case _ => node.children.exists(node => findTemplate(node, names, language))
}
private def parseUntil(matcher : Matcher, source : Source, level : Int) : List[Node] =
{
val line = source.line
//Check nesting level
if(level > MaxNestingLevel)
{
throw new WikiParserException("Maximum nesting level exceeded", line, source.findLine(line))
}
//Check number of errors
if(source.errors > MaxErrors)
{
throw new TooManyErrorsException(line, source.findLine(line))
}
var nodes = List[Node]()
var lastPos = source.pos
var lastLine = source.line
var currentText = ""
while(true)
{
val m = source.find(matcher, false);
//Add text
if(m.matched && source.pos - lastPos > m.tag.length)
{
currentText += source.getString(lastPos, source.pos - m.tag.length)
}
else if(!m.matched)
{
currentText += source.getString(lastPos, source.pos)
}
//If this text is at the beginning => remove leading whitespace
if(nodes.isEmpty)
{
currentText = currentText.replaceAll("^\\s+", "")
}
//If this text is at the end => remove trailing whitespace and return
if((!m.matched && level == 0) || !m.isStdTag)
{
if(currentText.isEmpty)
{
return nodes.reverse
}
else
{
nodes ::= TextNode(currentText, lastLine)
return nodes.reverse
}
}
//Check result of seek
if(!m.matched)
{
// FIXME: matcher.toString is not defined, message will be useless
throw new WikiParserException("Node not closed; expected "+matcher, line, source.findLine(line));
}
else
{
if(source.lastTag("<!--"))
{
//Skip html comment
source.find(commentEnd, false)
}
else if(source.lastTag("<ref"))
{
//Skip reference
skipHtmlTag(source, refEnd)
}
else if(source.lastTag("<math"))
{
//Skip math tag
skipHtmlTag(source, mathEnd)
}
else if(source.lastTag("<code"))
{
//Skip code tag
skipHtmlTag(source, codeEnd)
}
else if(source.lastTag("<source"))
{
//Skip source tag
skipHtmlTag(source, sourceEnd)
}
else
{
val startPos = source.pos
val startLine = source.line
try
{
//Parse new node
val newNode = createNodes(source, level + 1)
//Add text node
if(!currentText.isEmpty)
{
nodes ::= TextNode(currentText, lastLine)
currentText = ""
}
//Add new node
nodes :::= newNode
}
catch
{
case ex : TooManyErrorsException => throw ex
case ex : WikiParserException =>
{
logger.log(Level.FINE, "Error parsing node. "+ex.getMessage, ex)
source.pos = startPos
source.line = startLine
source.errors += 1
currentText += m.tag
}
}
}
}
lastPos = source.pos
lastLine = source.line
}
nodes.reverse
}
private def skipHtmlTag(source : Source, matcher : Matcher)
{
source.find(htmlTagEndOrStart, false)
if(source.lastTag("<"))
{
val endString = matcher.userTags.headOption
.getOrElse(throw new IllegalArgumentException("Matcher must have one closing HTML tag"))
.substring(1) // cut the first "<"
if(source.nextTag(endString))
{
source.seek(endString.length())
}
else
{
source.find(matcher, false)
}
}
//else we found "/>"
}
private def createNodes(source : Source, level : Int) : List[Node] =
{
if(source.lastTag("[") || source.lastTag("http"))
{
parseLink(source, level)
}
else if(source.lastTag("{{"))
{
if (source.pos < source.length && source.getString(source.pos, source.pos+1) == "{")
{
source.pos = source.pos+1 //advance 1 char
return List(parseTemplateParameter(source, level))
}
parseTemplate(source, level)
}
else if(source.lastTag("{|"))
{
List(parseTable(source, level))
}
else if(source.lastTag("\n="))
{
List(parseSection(source))
}
else
throw new WikiParserException("Unknown element type", source.line, source.findLine(source.line));
}
/**
* Try to parse a link node.
* Pay attention to invalid ExternalLinkNodes, as they are very likely to be plain text nodes
*
* @param source
* @param level
* @return
*/
private def parseLink(source : Source, level : Int) : List[Node] =
{
val startPos = source.pos
val startLine = source.line
if(source.lastTag("[["))
{
// FIXME: this block is a 98% copy of the next block
//val m = source.find(internalLinkLabelOrEnd)
//Set destination
//val destination = source.getString(startPos, source.pos - m.tag.length).trim
val destination = parseUntil(internalLinkLabelOrEnd, source, level)
      //destination is the parsed destination (will be used by e.g. the wiktionary module)
val destinationUri =
if(destination.size == 0) {
""
} else if(destination(0).isInstanceOf[TextNode]) {
destination(0).asInstanceOf[TextNode].text
} else {
throw new WikiParserException("Failed to parse internal link: " + destination, startLine, source.findLine(startLine))
}
//Parse label
val nodes =
if(source.lastTag("|"))
{
parseUntil(internalLinkEnd, source, level)
}
else
{
//No label found => Use destination as label
List(new TextNode(destinationUri, source.line))
}
/**
       * At the moment, link parsing does not support nested constructs such as templates, so we have to check for them manually here.
       * This is mainly a workaround to support cases like [[{{#property:p38}}]] or [[{{#property:p38|from=Qxxx}}]]
*/
val templStart = "{{"
val templEnd = "}}"
val label = nodes.map(_.toPlainText).mkString(" ").trim
      var adjustedDestinationUri = destinationUri
var adjustedNodes = nodes
if (destinationUri.contains(templStart) || label.contains(templEnd)) // there is a template to define the link
{
//get the text inside the link
val newText = if (destinationUri.equals(label)) destinationUri else destinationUri + "|" + label
//reparse the text
val newSource = new Source(newText, source.language)
newSource.line = source.line
val newNodes = parseUntil(new Matcher(List(), true), newSource, 0)
val newNodesToText = newNodes.map(_.toPlainText).mkString(" ").trim
if (newNodesToText.isEmpty && !newNodes.isEmpty)
{
return newNodes
} else if (!newNodesToText.contains('|')) // same target / label
{
          adjustedDestinationUri = newNodesToText
adjustedNodes = newNodes
} else if (newNodesToText.contains('|'))//we need to split the label from the link
{
          adjustedDestinationUri = newNodesToText.substring(0,newNodesToText.indexOf('|'))
          adjustedNodes = NodeUtil.splitNodes(newNodes, "|").drop(1).flatten //drop the destination (the part before the first '|')
}
}
try {
        List(createInternalLinkNode(source, adjustedDestinationUri, adjustedNodes, startLine, destination))
} catch {
        // This happens when an interwiki link has a language that is not defined and throws an unknown namespace error
case e: IllegalArgumentException => throw new WikiParserException("Failed to parse internal link: " + destination, startLine, source.findLine(startLine))
}
}
else if(source.lastTag("["))
{
// FIXME: this block is a 98% copy of the previous block
//val tag = source.find(externalLinkLabelOrEnd)
//Set destination
//val destinationURI = source.getString(startPos, source.pos - 1).trim
val destination = parseUntil(externalLinkLabelOrEnd, source, level)
      //destination is the parsed destination (will be used by e.g. the wiktionary module)
val destinationURI =
if (destination.size == 0) {
""
} else if(destination(0).isInstanceOf[TextNode]) {
destination(0).asInstanceOf[TextNode].text
} else {
// The following line didn't make sense. createExternalLinkNode() will simply throw a NullPointerException.
// null // has a semantic within the wiktionary module, and should never occur for wikipedia
throw new WikiParserException("Failed to parse external link: " + destination, startLine, source.findLine(startLine))
}
var hasLabel = true
//Parse label
val nodes =
if(source.lastTag(" "))
{
parseUntil(externalLinkEnd, source, level);
}
else
{
//No label found => Use destination as label
hasLabel = false
List(new TextNode(destinationURI, source.line))
}
try {
List(createExternalLinkNode(source, destinationURI, nodes, startLine, destination))
} catch {
case _ : WikiParserException => // if the URL is not valid then it is a plain text node
List(new TextNode("[" + destinationURI + (if (hasLabel) " " + nodes.map(_.toPlainText).mkString else "") + "]", source.line))
}
}
else
{
val result = source.find(linkEnd)
//The end tag (e.g. ' ') is not part of the link itself
source.seek(-result.tag.length)
//Set destination
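      // startPos - 4 backs up to re-include the already-consumed 4-character "http" prefix in the URI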
val destinationURI = source.getString(startPos - 4, source.pos).trim
//Use destination as label
val nodes = List(new TextNode(destinationURI, source.line))
List(createExternalLinkNode(source, destinationURI, nodes, startLine, nodes))
}
}
private def createExternalLinkNode(source : Source, destination : String, nodes : List[Node], line : Int, destinationNodes : List[Node]) : LinkNode =
{
try
{
// TODO: Add a validation routine which conforms to Mediawiki
// This will fail for news:// or gopher:// protocols
//See http://www.mediawiki.org/wiki/Help:Links#External_links
val relProtocolDest = if (destination.startsWith("//")) "http:" + destination else destination
// Do not accept non-absolute links because '[]' can be used as wiki text
// e.g. CC1=CC(=CC(=C1O)[N+](=O)[O-])[N+](=O)[O-]
if (!UriUtils.hasKnownScheme(relProtocolDest)) throw new WikiParserException("Invalid external link: " + destination, line, source.findLine(line))
val sameHost = if (relProtocolDest.contains("{{SERVERNAME}}")) relProtocolDest.replace("{{SERVERNAME}}", source.language.baseUri.replace("http://", "")) else relProtocolDest
ExternalLinkNode(new URI(sameHost), nodes, line, destinationNodes)
}
catch
{
// As per URL.toURI documentation non-strictly RFC 2396 compliant URLs cannot be parsed to URIs
case _ : URISyntaxException => throw new WikiParserException("Invalid external link: " + destination, line, source.findLine(line))
}
}
private def createInternalLinkNode(source : Source, destination : String, nodes : List[Node], line : Int, destinationNodes : List[Node]) : LinkNode =
{
val destinationTitle = WikiTitle.parse(destination, source.language)
if(destinationTitle.language == source.language)
{
InternalLinkNode(destinationTitle, nodes, line, destinationNodes)
}
else
{
InterWikiLinkNode(destinationTitle, nodes, line, destinationNodes)
}
}
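  /**
   * Parses a template parameter such as {{{1}}} or {{{name}}}.
   * The leading braces have already been consumed by createNodes.
   */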
private def parseTemplateParameter(source : Source, level : Int) : TemplateParameterNode =
{
val line = source.line
val keyNodes = parseUntil(templateParameterEnd , source, level)
if(keyNodes.size != 1 || ! keyNodes.head.isInstanceOf[TextNode])
throw new WikiParserException("Template variable contains invalid elements", line, source.findLine(line))
// FIXME: removing "<includeonly>" here is a hack.
// We need a preprocessor that resolves stuff like <includeonly>...</includeonly>
// based on configuration flags.
val key = keyNodes.head.toWikiText.replace("<includeonly>", "").replace("</includeonly>", "").replace("<noinclude>", "").replace("</noinclude>", "")
// FIXME: parseUntil(templateParameterEnd) should be correct. Without it, we don't actually
// consume the source until the end of the template parameter. But if we use it, the parser
// fails for roughly twice as many pages, so for now we deactivate it with "if (true)".
val nodes = if (true || source.lastTag("}}}")) List.empty else parseUntil(templateParameterEnd, source, level)
new TemplateParameterNode(key, nodes, line)
}
private def parseTemplate(source : Source, level : Int) : List[Node] =
{
val startLine = source.line
var title : WikiTitle = null;
var properties = List[PropertyNode]()
var curKeyIndex = 1
while(true)
{
//The first entry denotes the name of the template or parser function
if(title == null)
{
val nodes = parseUntil(propertyEndOrParserFunctionNameEnd, source, level)
val templateName = nodes match
{
case TextNode(text, _) :: _ => text
case _ => throw new WikiParserException("Invalid Template name", startLine, source.findLine(startLine))
}
val decodedName = WikiUtil.cleanSpace(templateName).capitalize(source.language.locale)
if(source.lastTag(":"))
{
return List(parseParserFunction(decodedName, source, level))
}
title = new WikiTitle(decodedName, Namespace.Template, source.language)
}
else
{
val propertyNode = parseProperty(source, curKeyIndex.toString, level)
properties ::= propertyNode
if(propertyNode.key == curKeyIndex.toString)
{
curKeyIndex += 1
}
}
//Reached template end?
if(source.lastTag("}}"))
{
// TODO: Find a way to leverage template redirects!!!!
return TemplateNode.transform(new TemplateNode(title, properties.reverse, startLine))
}
}
throw new WikiParserException("Template not closed", startLine, source.findLine(startLine))
}
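  /**
   * Parses a single template property. Named properties ("|key=value") keep their key,
   * while positional ones ("|value") fall back to defaultKey; e.g. in {{Infobox|name=X|42}}
   * the property "42" receives the positional key "1".
   */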
def parseProperty(source : Source, defaultKey : String, level : Int) : PropertyNode =
{
val line = source.line
var nodes = parseUntil(propertyValueOrEnd, source, level)
var key = defaultKey
if(source.lastTag("="))
{
//The currently parsed node is a key
if(nodes.size != 1 || !nodes.head.isInstanceOf[TextNode])
throw new WikiParserException("Template property key contains invalid elements", line, source.findLine(line))
key = nodes.head.retrieveText.get.trim
//Parse the corresponding value
nodes = parseUntil(propertyEnd, source, level)
}
PropertyNode(key, nodes, line)
}
def parseProperty(property : String, language: Language = Language.English) : PropertyNode = {
parseProperty(new Source(property, language), "unknown", 0)
}
private def parseParserFunction(decodedName : String, source : Source, level : Int) : ParserFunctionNode =
{
val children = parseUntil(parserFunctionEnd, source, level)
val startLine = source.line
ParserFunctionNode(decodedName, children, startLine)
}
private def parseTable(source : Source, level : Int) : TableNode =
{
val startPos = source.pos
val line = source.line
var nodes = List[TableRowNode]()
var caption : Option[String] = None
//Parse rows
var done = false
while(!done)
{
//Find first row
val m = source.find(tableRowEnd1) //"|}", "|+", "|-", "|", "!"
val tag = m.tagIndex
if(tag == 0) //"|}"
{
//Reached table end
done = true
}
else if(tag == 1) //"|+"
{
//Found caption
caption = Some(source.getString(startPos, source.pos - 2).trim)
}
else
{
if(tag == 2) //"|-"
{
//Move to first cell
val m2 = source.find(tableRowEnd2) //"|}", "|-", "|", "!"
if(m2.tagIndex == 0 || m2.tagIndex == 1)
{
//Empty row
nodes ::= new TableRowNode(List.empty, source.line)
return TableNode(caption, nodes.reverse, line);
}
}
//Parse row
nodes ::= parseTableRow(source, level)
//Reached table end?
if(source.lastTag("|}"))
{
done = true
}
}
}
TableNode(caption, nodes.reverse, line);
}
private def parseTableRow(source : Source, level : Int) : TableRowNode =
{
val line = source.line
var nodes = List[TableCellNode]()
while(true)
{
//Parse table cell
nodes ::= parseTableCell(source, level)
//Reached row end?
if(source.lastTag("|}") || source.lastTag("|-"))
{
return new TableRowNode(nodes.reverse, line)
}
}
null
}
private def parseTableCell(source : Source, level : Int) : TableCellNode =
{
val startPos = source.pos
val startLine = source.line
var rowspan = 1
var colspan = 1
var nodes = parseUntil(tableCellEnd1, source, level)
val lookBack = source.getString(source.pos - 2, source.pos)
if(lookBack == "\n ")
{
source.find(tableCellEnd2)
}
else if((lookBack(1) == '|' || lookBack(1) == '!') && lookBack(0) != '\n' && lookBack(0) != '|' && lookBack(0) != '!' && !nodes.isEmpty)
{
//This cell contains formatting parameters
val formattingStr = source.getString(startPos, source.pos - 1).trim
rowspan = parseTableParam("rowspan", formattingStr)
colspan = parseTableParam("colspan", formattingStr)
//Parse the cell contents
nodes = this.parseUntil(tableCellEnd3, source, level)
if(source.lastTag("\n "))
{
source.find(tableCellEnd2);
}
}
new TableCellNode(nodes, startLine, rowspan, colspan)
}
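  /**
   * Extracts an integer formatting parameter from a table cell, e.g.
   * parseTableParam("colspan", "colspan=\"2\" align=\"left\"") yields 2.
   * Returns 1 if the parameter is absent or not a valid number.
   */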
private def parseTableParam(name : String, str : String) : Int =
{
//Find start index of the value
var start = str.indexOf(name);
if(start == -1)
{
return 1;
}
start = str.indexOf('=', start)
if(start == -1)
{
return 1;
}
start += 1;
//Find end index of the value
var end = str.indexOf(' ', start)
if(end == -1)
{
end = str.length - 1;
}
//Convert to integer
var valueStr = str.substring(start, end + 1)
valueStr = valueStr.replace("\"", "").trim
try
{
valueStr.toInt;
}
catch
{
case _ : NumberFormatException => 1
}
}
private def parseSection(source : Source) : SectionNode =
{
val line = source.line
//Determine level
var level = 1
while(source.nextTag("="))
{
level += 1
source.seek(1)
}
//Get name
val startPos = source.pos
val nodes = this.parseUntil(sectionEnd, source, level)
source.seek(-1)
if(nodes.isEmpty)
{
throw new WikiParserException("Section was not closed", line, source.findLine(line))
}
val endPos = source.pos - level - 1
if(endPos <= startPos)
{
throw new WikiParserException("Invalid section tag", line, source.findLine(line))
}
val name = source.getString(startPos, endPos).trim
//Remove trailing '=' from section name
if(nodes.last.isInstanceOf[TextNode] && nodes.last.asInstanceOf[TextNode].text.endsWith("=")){
val lastTextNode = nodes.last.asInstanceOf[TextNode]
val cleanNodes = nodes.init :+ lastTextNode.copy(text = lastTextNode.text.dropRight(level - 1))
return SectionNode(name, level, cleanNodes, source.line - 1);
}
SectionNode(name, level, nodes, source.line - 1);
}
}
| FnOio/dbpedia-parsing-functions-scala | src/main/scala/dbpedia/dataparsers/util/wikiparser/impl/simple/SimpleWikiParser.scala | Scala | gpl-2.0 | 28,626 |
package com.wavesplatform
import cats.syntax.either._
import com.wavesplatform.account.PrivateKey
import com.wavesplatform.block.Block.{TransactionProof, TransactionsMerkleTree}
import com.wavesplatform.common.merkle.Merkle._
import com.wavesplatform.block.validation.Validators._
import com.wavesplatform.common.state.ByteStr
import com.wavesplatform.protobuf.transaction.PBTransactions
import com.wavesplatform.settings.GenesisSettings
import com.wavesplatform.transaction.Transaction
import scala.util.Try
package object block {
// Validation
private[block] implicit class BlockValidationOps(val block: Block) extends AnyVal {
def validate: Validation[Block] = validateBlock(block)
def validateToTry: Try[Block] = toTry(validateBlock(block))
def validateGenesis(gs: GenesisSettings): Validation[Block] = validateGenesisBlock(block, gs)
}
private[block] implicit class MicroBlockValidationOps(val microBlock: MicroBlock) extends AnyVal {
def validate: Validation[MicroBlock] = validateMicroBlock(microBlock)
def validateToTry: Try[MicroBlock] = toTry(validateMicroBlock(microBlock))
}
private def toTry[A](result: Validation[A]): Try[A] = result.leftMap(ge => new IllegalArgumentException(ge.err)).toTry
// Sign
private[block] implicit class BlockSignOps(val block: Block) extends AnyVal {
def sign(signer: PrivateKey): Block = block.copy(signature = crypto.sign(signer, block.bodyBytes()))
}
private[block] implicit class MicroBlockSignOps(val microBlock: MicroBlock) extends AnyVal {
def sign(signer: PrivateKey): MicroBlock = microBlock.copy(signature = crypto.sign(signer, microBlock.bytesWithoutSignature()))
}
def transactionProof(transaction: Transaction, transactionData: Seq[Transaction]): Option[TransactionProof] =
transactionData.indexWhere(transaction.id() == _.id()) match {
case -1 => None
case idx => Some(TransactionProof(transaction.id(), idx, mkProofs(idx, mkMerkleTree(transactionData)).reverse))
}
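  // Illustrative use: transactionProof(tx, block.transactionData) yields Some(proof)
  // carrying the transaction id, its index and the Merkle branch needed to verify
  // inclusion against the block's transactionsRoot (assuming tx is in the block).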
implicit class MerkleTreeOps(private val levels: TransactionsMerkleTree) extends AnyVal {
def transactionsRoot: ByteStr = {
require(levels.nonEmpty && levels.head.nonEmpty, "Invalid merkle tree")
ByteStr(levels.head.head)
}
}
def mkMerkleTree(txs: Seq[Transaction]): TransactionsMerkleTree = mkLevels(txs.map(PBTransactions.protobuf(_).toByteArray))
def mkTransactionsRoot(version: Byte, transactionData: Seq[Transaction]): ByteStr =
if (version < Block.ProtoBlockVersion) ByteStr.empty
else mkLevels(transactionData.map(PBTransactions.protobuf(_).toByteArray)).transactionsRoot
}
| wavesplatform/Waves | node/src/main/scala/com/wavesplatform/block/package.scala | Scala | mit | 2,686 |
package com.a.eye.gemini.analysis.util
import java.util.Date
object DateUtilTest {
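  // Ad-hoc check: for the current time, print each slot key ("start-end" in millis)
  // and its human-readable bounds for the atom/hour/day/week/month slot sizes.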
def main(args: Array[String]): Unit = {
val partition = 0
val tcpTime = new Date().getTime
val atom = new AtomTimeSlotUtil().compareSlotTime(tcpTime)
println(atom)
val atoms = atom.split("-")
println(DateUtil.date2String(atoms.apply(0).toLong) + " - " + DateUtil.date2String(atoms.apply(1).toLong))
val hour = new HourTimeSlotUtil().compareSlotTime(tcpTime)
println(hour)
val hours = hour.split("-")
println(DateUtil.date2String(hours.apply(0).toLong) + " - " + DateUtil.date2String(hours.apply(1).toLong))
val day = new DayTimeSlotUtil().compareSlotTime(tcpTime)
println(day)
val days = day.split("-")
println(DateUtil.date2String(days.apply(0).toLong) + " - " + DateUtil.date2String(days.apply(1).toLong))
val week = new WeekTimeSlotUtil().compareSlotTime(tcpTime)
println(week)
val weeks = week.split("-")
println(DateUtil.date2String(weeks.apply(0).toLong) + " - " + DateUtil.date2String(weeks.apply(1).toLong))
val month = new MonthTimeSlotUtil().compareSlotTime(tcpTime)
println(month)
val months = month.split("-")
println(DateUtil.date2String(months.apply(0).toLong) + " - " + DateUtil.date2String(months.apply(1).toLong))
}
}
| skywalking-developer/gemini | gemini-analysis/src/test/scala/com/a/eye/gemini/analysis/util/DateUtilTest.scala | Scala | apache-2.0 | 1,316 |
package tethys.derivation.impl
import scala.reflect.macros.blackbox
/**
* Created by eld0727 on 23.04.17.
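 *
 * Usage sketch (hypothetical macro bundle):
 * {{{
 * class MyMacro(val c: blackbox.Context) extends LoggingUtils {
 *   def impl: c.Tree = { warn("about to expand"); ??? }
 * }
 * }}}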
*/
trait LoggingUtils {
val c: blackbox.Context
def info(msg: => String, force: Boolean = false): Unit = c.info(c.enclosingPosition, msg, force)
def warn(msg: String): Unit = c.warning(c.enclosingPosition, msg)
def error(msg: String): Unit = c.error(c.enclosingPosition, msg)
def abort(msg: String): Nothing = c.abort(c.enclosingPosition, msg)
}
| tethys-json/tethys | modules/macro-derivation/src/main/scala/tethys/derivation/impl/LoggingUtils.scala | Scala | apache-2.0 | 469 |
package fpinscala.laziness
trait Stream[+A] {
def toList :List[A] = this match {
case Empty => Nil
case Cons(h, t) => h() :: t().toList
}
def foldRight[B](z: => B)(f: (A, => B) => B): B = // The arrow `=>` in front of the argument type `B` means that the function `f` takes its second argument by name and may choose not to evaluate it.
this match {
case Cons(h,t) => f(h(), t().foldRight(z)(f)) // If `f` doesn't evaluate its second argument, the recursion never occurs.
case _ => z
}
def exists(p: A => Boolean): Boolean =
foldRight(false)((a, b) => p(a) || b) // Here `b` is the unevaluated recursive step that folds the tail of the stream. If `p(a)` returns `true`, `b` will never be evaluated and the computation terminates early.
@annotation.tailrec
final def find(f: A => Boolean): Option[A] = this match {
case Empty => None
case Cons(h, t) => if (f(h())) Some(h()) else t().find(f)
}
def take(n: Int): Stream[A] =
if(n > 0) {
this match {
case Empty => Empty
        case Cons(h,_) if n == 1 => Stream.cons(h(), Empty) // last element requested: do not recurse into the tail
case Cons(h,t) => Stream.cons(h(), t().take(n-1))
}
} else {
Empty
}
def drop(n: Int): Stream[A] =
if (n <= 0) {
this
} else {
this match {
case Empty => Empty
case Cons(h,t) => t().drop(n-1)
}
}
def takeWhile(p: A => Boolean): Stream[A] = this match {
case Cons(h,t) if p(h()) => Stream.cons(h(), t() takeWhile p)
case _ => Empty
}
def forAll(p: A => Boolean): Boolean = foldRight(true)((a,b) => p(a) && b)
def takeWhile2(p: A => Boolean): Stream[A] = foldRight(Empty:Stream[A])((a,b) => if (p(a)) Stream.cons(a, b) else Empty)
def headOption : Option[A] =
foldRight(Option.empty:Option[A])((a:A,b) => Option(a) )
def map[B](f: A => B) : Stream[B] = foldRight(Empty:Stream[B])((a,b) => Stream.cons(f(a), b) )
def filter(p: A => Boolean) : Stream[A] = foldRight(Empty:Stream[A])((a,b) => if(p(a)) Stream.cons(a, b) else b )
def append[B>:A](s:Stream[B]): Stream[B] =
foldRight(s)((a,b) => Stream.cons(a,b))
def flatMap[B](f: A => Stream[B]) : Stream[B] =
foldRight(Empty:Stream[B])((a,b) => f(a).append(b) )
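  // startsWith zips this stream with the candidate prefix via unfold and requires all
  // compared heads to match, e.g.
  //   Stream(1,2,3) startsWith Stream(1,2) == true
  //   Stream(1,2,3) startsWith Stream(2)   == false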
def startsWith[A](s:Stream[A]) : Boolean = Stream.unfold((this,s)) {
case (Empty, Empty) => None
case (Empty, _) => Some(false, (Empty, Empty))
case (_, Empty) => None
case (Cons(h1,t1), Cons(h2,t2)) if h1() == h2() => Some(true, (t1(), t2()))
case _ => Some(false, (Empty, Empty))
}.forAll(r => r)
def mapUnfold[B](f: A => B): Stream[B] = Stream.unfold(this){
case Empty => None
case Cons(h, t) => Some((f(h()),t()))
}
def takeUnfold(n:Int) : Stream[A] = Stream.unfold((n, this)) {
case (_, Empty) => None
case (m, _) if m <= 0 => None
case (m, Cons(h,t)) => Some(h(),(m-1,t()))
}
def takeWhileUnfold(p: A => Boolean) : Stream[A] = Stream.unfold(this) {
case Empty => None
case Cons(h, t) if p(h()) => Some(h(), t())
case _ => None
}
def zipWithUnfold[B](sb: Stream[B]) : Stream[(A,B)] = Stream.unfold((this,sb)) {
case (Empty,_) => None
case (_,Empty) => None
case (Cons(h1,t1),Cons(h2,t2)) => Some((h1(),h2()), (t1(),t2()))
}
def zipAllUnfold[B](sb: Stream[B]) : Stream[(Option[A], Option[B])] = Stream.unfold((this,sb)){
case (Empty,Empty) => None
case (Empty,Cons(h,t)) => {
        // Scala cannot infer (Option[A], Option[B]) here without an explicit annotation
val a: (Option[A], Option[B]) = (None, Some(h()))
Some(a, (Empty,t()))
}
case (Cons(h,t),Empty) => {
val a: (Option[A], Option[B]) = (Some(h()), None)
val s: (Stream[A], Stream[B]) = (t(), Empty)
Some(a, s)
}
case (Cons(h1,t1),Cons(h2,t2)) => Some(
(Some(h1()),Some(h2())),
(t1(),t2())
)
}
}
case object Empty extends Stream[Nothing]
case class Cons[+A](h: () => A, t: () => Stream[A]) extends Stream[A]
object Stream {
def cons[A](hd: => A, tl: => Stream[A]): Stream[A] = {
lazy val head = hd
lazy val tail = tl
Cons(() => head, () => tail)
}
def empty[A]: Stream[A] = Empty
def apply[A](as: A*): Stream[A] =
if (as.isEmpty) empty
else cons(as.head, apply(as.tail: _*))
val ones: Stream[Int] = Stream.cons(1, ones)
def constant(n:Int): Stream[Int] = Stream.cons(n, constant(n))
def from(n: Int): Stream[Int] = Stream.cons(n, from(n+1))
val fibo : Stream[Int] = {
def go(n0:Int, n1:Int) : Stream[Int] =
cons(n0, go(n1, n0+n1))
go(0,1)
}
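  // unfold builds a stream from a seed: f returns Some((value, nextState)) to emit and
  // continue, or None to stop, e.g. unfold(0)(s => Some((s, s + 1))) yields 0, 1, 2, ...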
def unfold[A, S](z: S)(f: S => Option[(A, S)]): Stream[A] = {
def go(n:S) : Stream[A] = {
f(n) match {
case Some((a,s)) => cons(a, go(s))
case None => empty
}
}
go(z)
}
def fiboUnfold = unfold((0,1)){ case (n0,n1) => Some(n0,(n1,n0+n1)) }
def fromUnfold(n:Int) = unfold(n){ s => Some(s, s+1) }
def constantUnfold(n:Int) = unfold(n){ s => Some(s, s) }
val onesUnfold = constantUnfold(1)
}
| karolchmist/fpinscala | exercises/src/main/scala/fpinscala/laziness/Stream.scala | Scala | mit | 4,980 |
package immortan.utils
import fr.acinq.bitcoin.Crypto.PublicKey
import fr.acinq.bitcoin.{BtcAmount, Satoshi, SatoshiLong}
import fr.acinq.eclair._
import fr.acinq.eclair.payment.PaymentRequest
import fr.acinq.eclair.router.Graph.GraphStructure
import fr.acinq.eclair.router.RouteCalculation
import fr.acinq.eclair.wire.NodeAddress
import immortan.crypto.Tools.trimmed
import immortan.utils.InputParser._
import immortan.utils.uri.Uri
import immortan.{LNParams, RemoteNodeInfo}
import scodec.bits.ByteVector
import scala.util.matching.{Regex, UnanchoredRegex}
import scala.util.parsing.combinator.RegexParsers
import scala.util.{Failure, Success, Try}
object InputParser {
var value: Any = new String
case object DoNotEraseRecordedValue
type Checker = PartialFunction[Any, Any]
def checkAndMaybeErase(fun: Checker): Unit = fun(value) match {
case DoNotEraseRecordedValue => // Do nothing, value is retained
case _ => value = null // Erase recorded value
}
private[this] val prefixes = PaymentRequest.prefixes.values mkString "|"
private[this] val lnUrl = "(?im).*?(lnurl)([0-9]+[a-z0-9]+)".r.unanchored
  private[this] val shortNodeLink = "([a-fA-F0-9]{66})@([a-zA-Z0-9:.\\-_]+)".r.unanchored
  val nodeLink: UnanchoredRegex = "([a-fA-F0-9]{66})@([a-zA-Z0-9:.\\-_]+):([0-9]+)".r.unanchored
  val lnPayReq: UnanchoredRegex = s"(?im).*?($prefixes)([0-9]{1,}[a-z0-9]+){1}".r.unanchored
  val identifier: Regex = "^([a-zA-Z0-9][a-zA-Z0-9\\-_.]*)?[a-zA-Z0-9]@([a-zA-Z0-9][a-zA-Z0-9\\-]{0,61}[a-zA-Z0-9]\\.)+[a-zA-Z0-9][a-zA-Z0-9\\-]{0,61}[a-zA-Z0-9]$".r
val lightning: String = "lightning:"
val bitcoin: String = "bitcoin:"
def recordValue(raw: String): Unit = value = parse(raw)
def parse(rawInput: String): Any = rawInput take 2880 match {
case lnUrl(prefix, data) => LNUrl.fromBech32(s"$prefix$data")
case nodeLink(key, host, port) => RemoteNodeInfo(PublicKey.fromBin(ByteVector fromValidHex key), NodeAddress.fromParts(host, port.toInt), host)
case shortNodeLink(key, host) => RemoteNodeInfo(PublicKey.fromBin(ByteVector fromValidHex key), NodeAddress.fromParts(host, port = 9735), host)
case _ =>
val withoutSlashes = PaymentRequestExt.removePrefix(rawInput).trim
val isLightningInvoice = lnPayReq.findFirstMatchIn(rawInput).isDefined
val isIdentifier = identifier.findFirstMatchIn(withoutSlashes).isDefined
val addressToAmount = MultiAddressParser.parseAll(MultiAddressParser.parse, rawInput)
if (isIdentifier) LNUrl.fromIdentifier(withoutSlashes)
else if (isLightningInvoice) PaymentRequestExt.fromUri(withoutSlashes.toLowerCase)
else addressToAmount getOrElse BitcoinUri.fromRaw(s"$bitcoin$withoutSlashes")
}
}
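// Illustrative inputs (values are hypothetical):
//   parse("03...66-hex-chars...@host:9735")  -> RemoteNodeInfo
//   parse("lightning:lnbc1...")              -> PaymentRequestExt
//   parse("addr1 0.5; addr2 1,000")          -> MultiAddressParser.AddressToAmount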
object PaymentRequestExt {
def removePrefix(raw: String): String = raw.split(':').toList match {
case prefix :: content if lightning.startsWith(prefix.toLowerCase) => content.mkString.replace("//", "")
case prefix :: content if bitcoin.startsWith(prefix.toLowerCase) => content.mkString.replace("//", "")
case _ => raw
}
def withoutSlashes(prefix: String, uri: Uri): String = prefix + removePrefix(uri.toString)
def fromUri(invoiceWithoutSlashes: String): PaymentRequestExt = {
val lnPayReq(invoicePrefix, invoiceData) = invoiceWithoutSlashes
val uri = Try(Uri parse s"$lightning//$invoiceWithoutSlashes")
val pr = PaymentRequest.read(s"$invoicePrefix$invoiceData")
PaymentRequestExt(uri, pr, s"$invoicePrefix$invoiceData")
}
def from(pr: PaymentRequest): PaymentRequestExt = {
val noUri: Try[Uri] = Failure(new RuntimeException)
PaymentRequestExt(noUri, pr, PaymentRequest write pr)
}
}
case class PaymentRequestExt(uri: Try[Uri], pr: PaymentRequest, raw: String) {
def isEnough(collected: MilliSatoshi): Boolean = pr.amount.exists(requested => collected >= requested)
def withNewSplit(anotherPart: MilliSatoshi): String = s"$lightning$raw?splits=" + (anotherPart :: splits).map(_.toLong).mkString(",")
lazy val extraEdges: Set[GraphStructure.GraphEdge] = RouteCalculation.makeExtraEdges(pr.routingInfo, pr.nodeId)
val splits: List[MilliSatoshi] = uri.map(_.getQueryParameter("splits").split(',').toList.map(_.toLong) map MilliSatoshi.apply).getOrElse(Nil)
val hasSplitIssue: Boolean = pr.amount.exists(splits.sum + LNParams.minPayment > _) || (pr.amount.isEmpty && splits.nonEmpty)
val splitLeftover: MilliSatoshi = pr.amount.map(_ - splits.sum).getOrElse(0L.msat)
val descriptionOpt: Option[String] = pr.description.left.toOption.map(trimmed).filter(_.nonEmpty)
val brDescription: String = descriptionOpt.map(desc => s"<br><br>$desc").getOrElse(new String)
}
object BitcoinUri {
def fromRaw(raw: String): BitcoinUri = {
val dataWithoutPrefix = PaymentRequestExt.removePrefix(raw)
val uri = Uri.parse(s"$bitcoin//$dataWithoutPrefix")
BitcoinUri(Success(uri), uri.getHost)
}
}
case class BitcoinUri(uri: Try[Uri], address: String) {
val amount: Option[MilliSatoshi] = uri.map(_ getQueryParameter "amount").map(BigDecimal.apply).map(Denomination.btcBigDecimal2MSat).toOption
val prExt: Option[PaymentRequestExt] = uri.map(_ getQueryParameter "lightning").map(PaymentRequestExt.fromUri).toOption
val message: Option[String] = uri.map(_ getQueryParameter "message").map(trimmed).filter(_.nonEmpty).toOption
val label: Option[String] = uri.map(_ getQueryParameter "label").map(trimmed).filter(_.nonEmpty).toOption
}
object MultiAddressParser extends RegexParsers {
type AddressToAmountItem = (String, Satoshi)
case class AddressToAmount(values: Seq[AddressToAmountItem] = Nil)
private[this] val longSat = "[0-9,]+".r ^^ (_.replace(",", "").toLong.sat)
  private[this] val decimalSat = "[0-9]*\\.[0-9]+".r ^^ (raw => (BigDecimal(raw) * BtcAmount.Coin).toLong.sat)
  private[this] val item = "\\w+".r ~ (decimalSat | longSat) ^^ { case address ~ sat => address -> sat }
private[this] val separator = opt(";")
val parse: Parser[AddressToAmount] = repsep(item, separator).map(AddressToAmount)
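  // e.g. parseAll(parse, "addrA 0.5; addrB 1,000") roughly yields
  // AddressToAmount(Seq(("addrA", 50000000L.sat), ("addrB", 1000L.sat)))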
}
| btcontract/wallet | app/src/main/java/immortan/utils/InputParser.scala | Scala | apache-2.0 | 6,025 |
/*
* SPDX-License-Identifier: Apache-2.0
*
* Copyright 2015-2021 Andre White.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.truthencode.ddo.support.matching
/**
 * A case-matching strategy that compares strings by their lower-case form.
*/
trait LowerCaseStrategy extends CaseMatchStrategy {
self: StringMatchBounds =>
override val caseMatchOption: CaseMatchOption = CaseMatchOption.LowerCase
}
| adarro/ddo-calc | subprojects/common/ddo-util/src/main/scala/io/truthencode/ddo/support/matching/LowerCaseStrategy.scala | Scala | apache-2.0 | 897 |
package com.twitter.finagle.builder
/*
* Provides a class for specifying a collection of servers.
* e.g. `finagle-serversets` is an implementation of the Finagle Cluster interface using
 * [[com.twitter.common.zookeeper.ServerSet]]
 * (http://twitter.github.com/commons/apidocs/#com.twitter.common.zookeeper.ServerSet),
* {{{
* val serverSet = new ServerSetImpl(zookeeperClient, "/twitter/services/silly")
* val cluster = new ZookeeperServerSetCluster(serverSet)
* }}}
*/
import java.net.SocketAddress
import com.twitter.finagle.ServiceFactory
/**
* A collection of SocketAddresses. The intention of this interface
* to express membership in a cluster of servers that provide a
* specific service.
*
* Note that a Cluster can be elastic: members can join or leave at
* any time.
*/
trait Cluster {
/**
* Produce a sequence of ServiceFactories that changes as servers join and
* leave the cluster.
*/
def mkFactories[Req, Rep](f: SocketAddress => ServiceFactory[Req, Rep]): Seq[ServiceFactory[Req, Rep]]
/**
* Register a new Server in the cluster at the given SocketAddress, as so
* {{{
* val serviceAddress = new InetSocketAddress(...)
* val server = ServerBuilder()
* .bindTo(serviceAddress)
* .build()
* cluster.join(serviceAddress)
* }}}
*/
  def join(address: SocketAddress): Unit
}
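// Minimal static cluster sketch (addresses are hypothetical):
//   val cluster = new SocketAddressCluster(Seq(new InetSocketAddress("10.0.0.1", 8080)))
//   cluster.join(new InetSocketAddress("10.0.0.2", 8080))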
class SocketAddressCluster(underlying: Seq[SocketAddress])
extends Cluster
{
private[this] var self = underlying
def mkFactories[Req, Rep](f: SocketAddress => ServiceFactory[Req, Rep]) = self map f
  def join(address: SocketAddress): Unit = {
    self = self :+ address // accumulate on the current membership so repeated joins are kept
  }
}
| enachb/finagle_2.9_durgh | finagle-core/src/main/scala/com/twitter/finagle/builder/Cluster.scala | Scala | apache-2.0 | 1,644 |
package io.udash.auth
trait UserCtx {
def has(permission: Permission): Boolean
def isAuthenticated: Boolean
}
object UserCtx {
trait Unauthenticated extends UserCtx {
override def has(permission: Permission): Boolean = false
override def isAuthenticated: Boolean = false
}
}
| UdashFramework/udash-core | auth/src/main/scala/io/udash/auth/UserCtx.scala | Scala | apache-2.0 | 292 |
/*
* @author Philip Stutz
* @author Mihaela Verman
*
* Copyright 2013 University of Zurich
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.signalcollect.triplerush.vertices
import com.signalcollect.GraphEditor
import com.signalcollect.triplerush.ChildIdReply
import com.signalcollect.triplerush.util.MemoryEfficientSplayIntSet
import com.signalcollect.util.SplayIntSet
import com.signalcollect.util.Ints
import com.signalcollect.util.FastInsertIntSet
import com.signalcollect.util.SplayNode
/**
* Stores the SplayIntSet with the optimized child deltas in the state.
*/
abstract class OptimizedIndexVertex(
id: Long) extends IndexVertex[Any](id) {
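  // The state progressively upgrades its representation as child deltas are added:
  //   null        -> no child deltas yet
  //   Int         -> exactly one child delta
  //   Array[Byte] -> a compact FastInsertIntSet (up to ~1000 deltas)
  //   SplayIntSet -> a splay-tree-backed set beyond that threshold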
override def afterInitialization(graphEditor: GraphEditor[Long, Any]) {
super.afterInitialization(graphEditor)
}
def handleChildIdRequest(requestor: Long, graphEditor: GraphEditor[Long, Any]) {
val childIds: Array[Int] = {
if (state == null) {
Array[Int]() // Root vertex in an empty store.
} else {
state match {
case i: Int =>
Array(i)
case a: Array[Byte] =>
new FastInsertIntSet(a).toBuffer.toArray
case s: SplayIntSet =>
s.toBuffer.toArray
}
}
}
graphEditor.sendSignal(ChildIdReply(childIds), requestor)
}
override def edgeCount = {
if (state != null) numberOfStoredChildDeltas else 0
}
def cardinality = numberOfStoredChildDeltas
@inline final def numberOfStoredChildDeltas = {
state match {
case i: Int =>
if (i != 0) 1 else 0
case a: Array[Byte] =>
new FastInsertIntSet(a).size
case s: SplayIntSet =>
s.size
}
}
@inline def foreachChildDelta(f: Int => Unit) = {
state match {
case i: Int =>
f(i) // No check for 0, as an index vertex always needs to have at least one child delta set at this point.
case a: Array[Byte] =>
new FastInsertIntSet(a).foreach(f)
case s: SplayIntSet =>
s.foreach(f)
}
}
def addChildDelta(delta: Int): Boolean = {
if (state == null) {
state = delta
true
} else {
state match {
case i: Int =>
if (delta != i) {
var intSet = Ints.createEmptyFastInsertIntSet
intSet = new FastInsertIntSet(intSet).insert(i, 0.01f)
state = new FastInsertIntSet(intSet).insert(delta, 0.01f)
true
} else {
false
}
case a: Array[Byte] =>
val sizeBefore = new FastInsertIntSet(a).size
val intSetAfter = new FastInsertIntSet(a).insert(delta, 0.01f)
val sizeAfter = new FastInsertIntSet(intSetAfter).size
if (sizeAfter >= 1000) {
val splayIntSet = new MemoryEfficientSplayIntSet
val root = new SplayNode(intSetAfter)
splayIntSet.initializeWithRoot(root)
state = splayIntSet
} else {
state = intSetAfter
}
sizeAfter > sizeBefore
case s: SplayIntSet =>
val wasInserted = s.insert(delta)
wasInserted
}
}
}
}
| jacqueslk/triplerush-filter | src/main/scala/com/signalcollect/triplerush/vertices/OptimizedIndexVertex.scala | Scala | apache-2.0 | 3,690 |
package {{package}}
import com.twitter.finagle.{Service, Thrift}
import com.twitter.finagle.stats.{NullStatsReceiver, StatsReceiver}
import com.twitter.scrooge.{ThriftStruct, TReusableMemoryTransport}
import com.twitter.util.Future
import java.nio.ByteBuffer
import java.util.Arrays
import org.apache.thrift.protocol._
import org.apache.thrift.TApplicationException
import org.apache.thrift.transport.TMemoryInputTransport
import scala.collection.mutable.{
ArrayBuffer => mutable$ArrayBuffer, HashMap => mutable$HashMap}
import scala.collection.{Map, Set}
import scala.language.higherKinds
{{docstring}}
@javax.annotation.Generated(value = Array("com.twitter.scrooge.Compiler"))
class {{ServiceName}}$FinagleService(
iface: {{ServiceName}}[Future],
protocolFactory: TProtocolFactory,
stats: StatsReceiver,
maxThriftBufferSize: Int
) extends {{finagleServiceParent}}{{#hasParent}}(iface, protocolFactory, stats, maxThriftBufferSize){{/hasParent}} {
import {{ServiceName}}._
def this(
iface: {{ServiceName}}[Future],
protocolFactory: TProtocolFactory
) = this(iface, protocolFactory, NullStatsReceiver, Thrift.maxThriftBufferSize)
{{^hasParent}}
private[this] val tlReusableBuffer = new ThreadLocal[TReusableMemoryTransport] {
override def initialValue() = TReusableMemoryTransport(512)
}
private[this] def reusableBuffer: TReusableMemoryTransport = {
val buf = tlReusableBuffer.get()
buf.reset()
buf
}
private[this] val resetCounter = stats.scope("buffer").counter("resetCount")
private[this] def resetBuffer(trans: TReusableMemoryTransport): Unit = {
if (trans.currentCapacity > maxThriftBufferSize) {
resetCounter.incr()
tlReusableBuffer.remove()
}
}
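  // Buffers are reused per thread; any buffer that grew beyond maxThriftBufferSize is
  // dropped after the response is written so oversized responses do not pin memory.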
protected val functionMap = new mutable$HashMap[String, (TProtocol, Int) => Future[Array[Byte]]]()
protected def addFunction(name: String, f: (TProtocol, Int) => Future[Array[Byte]]) {
functionMap(name) = f
}
protected def exception(name: String, seqid: Int, code: Int, message: String): Future[Array[Byte]] = {
try {
val x = new TApplicationException(code, message)
val memoryBuffer = reusableBuffer
try {
val oprot = protocolFactory.getProtocol(memoryBuffer)
oprot.writeMessageBegin(new TMessage(name, TMessageType.EXCEPTION, seqid))
x.write(oprot)
oprot.writeMessageEnd()
oprot.getTransport().flush()
Future.value(Arrays.copyOfRange(memoryBuffer.getArray(), 0, memoryBuffer.length()))
} finally {
resetBuffer(memoryBuffer)
}
} catch {
case e: Exception => Future.exception(e)
}
}
protected def reply(name: String, seqid: Int, result: ThriftStruct): Future[Array[Byte]] = {
try {
val memoryBuffer = reusableBuffer
try {
val oprot = protocolFactory.getProtocol(memoryBuffer)
oprot.writeMessageBegin(new TMessage(name, TMessageType.REPLY, seqid))
result.write(oprot)
oprot.writeMessageEnd()
Future.value(Arrays.copyOfRange(memoryBuffer.getArray(), 0, memoryBuffer.length()))
} finally {
resetBuffer(memoryBuffer)
}
} catch {
case e: Exception => Future.exception(e)
}
}
final def apply(request: Array[Byte]): Future[Array[Byte]] = {
val inputTransport = new TMemoryInputTransport(request)
val iprot = protocolFactory.getProtocol(inputTransport)
try {
val msg = iprot.readMessageBegin()
val func = functionMap.get(msg.name)
func match {
case _root_.scala.Some(fn) =>
fn(iprot, msg.seqid)
case _ =>
TProtocolUtil.skip(iprot, TType.STRUCT)
exception(msg.name, msg.seqid, TApplicationException.UNKNOWN_METHOD,
"Invalid method name: '" + msg.name + "'")
}
} catch {
case e: Exception => Future.exception(e)
}
}
// ---- end boilerplate.
{{/hasParent}}
{{#functions}}
{{>function}}
{{/functions}}
}
| nkhuyu/scrooge | scrooge-generator/src/main/resources/scalagen/finagleService.scala | Scala | apache-2.0 | 3,965 |
import com.twitter.logging.LoggerFactory
import com.twitter.logging.config._
import com.twitter.parrot.config.ParrotServerConfig
import com.twitter.parrot.server.ParrotRequest
new ParrotServerConfig[ParrotRequest, Unit] {
  // Differences from the base config: verify replay timing and add think time between requests
replayTimeCheck = true
thinkTime = 500
com.twitter.parrot.util.ConsoleHandler.start(Level.ALL)
testHosts = List("api.twitter.com")
charEncoding = "deflate"
// for thrift
parrotPort = 9999
thriftName = "parrot"
clientIdleTimeoutInMs = 15000
idleTimeoutInSec = 300
minThriftThreads = 10
}
| twitter/iago | config/test-slow.scala | Scala | apache-2.0 | 552 |
package cache
import cache.CacheSchema.{TableNames, Tables}
import cache.models.CachedPullRequestPair
import git._
import scala.slick.driver.SQLiteDriver.simple._
import scala.slick.jdbc.meta.MTable
/**
* An info getter implementation for the JGit library.
* @param provider The JGit provider.
*/
class CachePairwiseDecorator(base: PairwiseList, provider: CacheProvider) extends PairwiseDecorator(base) {
implicit lazy val session = provider.Db
lazy val mode = provider.mode
lazy val insertPair = Tables.pairs.insertInvoker
lazy val getPairsByKey = for {
(shaOne, shaTwo) <- Parameters[(String, String)]
p <- Tables.pairs
if p.shaOne === shaOne
if p.shaTwo === shaTwo
} yield p
init()
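  // In Read mode a cache hit fills the pair from the cached row; in Write mode the row
  // is (re)written whenever it is missing or no longer represents the pair.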
override def decorate(pair: PullRequestPair): PullRequestPair = {
val cachedPairOption = get(pair)
if (mode == CacheMode.Read)
cachedPairOption match {
        case Some(cachedPair) => cachedPair.fill(pair) // Read mode already established above
case _ =>
}
else if (mode == CacheMode.Write)
cachedPairOption match {
case Some(cachedPair) if !cachedPair.represents(pair) => insert(pair)
case None => insert(pair)
case _ => // Cache already up-to-date
}
pair
}
private def get(pair: PullRequestPair): Option[CachedPullRequestPair] = {
val key = CachedPullRequestPair(pair)
getPairsByKey(key.shaOne, key.shaTwo).firstOption
}
private def insert(pair: PullRequestPair): Unit = {
insertPair.insertOrUpdate(CachedPullRequestPair(pair))
}
def init(): Unit = {
// Create table
if (MTable.getTables(TableNames.pairs).list.isEmpty)
Tables.pairs.ddl.create
}
}
| PRioritizer/PRioritizer-analyzer | src/main/scala/cache/CachePairwiseDecorator.scala | Scala | mit | 1,677 |
package core
class Zone[A] {
private[this] val buffer = new collection.mutable.ArrayBuffer[A]
def transferToEnd(fromIndex: Int, to: Zone[A]): A = {
val elem = buffer.remove(fromIndex)
to.append(elem)
elem
}
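  // Look all elements up before removing anything, then remove the indexes in ascending
  // order, shifting each one left by the number of elements already removed (the offset).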
def transferMultiToEnd(fromIndexes: Vector[Int], to: Zone[A]): Vector[A] = {
val elems = fromIndexes.map(index => buffer(index))
fromIndexes.sorted.zipWithIndex.foreach {
case (index, offset) =>
buffer.remove(index - offset)
}
to.appendAll(elems)
elems
}
  def transferAllToEnd(to: Zone[A]): Unit = {
    to.appendAll(buffer)
    buffer.clear()
  }
  def shuffle(): Unit = {
    val elems = util.Random.shuffle(buffer)
    buffer.clear()
    appendAll(elems)
  }
  def append(elem: A): Unit = {
    buffer += elem
  }
  def appendAll(elems: Seq[A]): Unit = {
    buffer ++= elems
  }
def remove(fromIndex: Int): A = buffer.remove(fromIndex)
def isEmpty: Boolean = buffer.isEmpty
def toVector: Vector[A] = buffer.toVector
}
| whence/powerlife | scala/powercards_statemachine/core/Zone.scala | Scala | mit | 970 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.hive.execution
import org.apache.hadoop.hive.serde2.`lazy`.LazySimpleSerDe
import org.scalatest.BeforeAndAfterEach
import org.scalatest.exceptions.TestFailedException
import org.apache.spark.{SparkException, TaskContext, TestUtils}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions.{Attribute, AttributeReference}
import org.apache.spark.sql.catalyst.plans.physical.Partitioning
import org.apache.spark.sql.execution.{SparkPlan, SparkPlanTest, UnaryExecNode}
import org.apache.spark.sql.hive.test.TestHiveSingleton
import org.apache.spark.sql.types.StringType
class ScriptTransformationSuite extends SparkPlanTest with TestHiveSingleton with
BeforeAndAfterEach {
import spark.implicits._
private val noSerdeIOSchema = HiveScriptIOSchema(
inputRowFormat = Seq.empty,
outputRowFormat = Seq.empty,
inputSerdeClass = None,
outputSerdeClass = None,
inputSerdeProps = Seq.empty,
outputSerdeProps = Seq.empty,
recordReaderClass = None,
recordWriterClass = None,
schemaLess = false
)
private val serdeIOSchema = noSerdeIOSchema.copy(
inputSerdeClass = Some(classOf[LazySimpleSerDe].getCanonicalName),
outputSerdeClass = Some(classOf[LazySimpleSerDe].getCanonicalName)
)
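  // Same schema, but with LazySimpleSerDe on both sides, so rows go through Hive's
  // serde instead of the raw delimited no-serde path.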
private var defaultUncaughtExceptionHandler: Thread.UncaughtExceptionHandler = _
private val uncaughtExceptionHandler = new TestUncaughtExceptionHandler
protected override def beforeAll(): Unit = {
super.beforeAll()
defaultUncaughtExceptionHandler = Thread.getDefaultUncaughtExceptionHandler
Thread.setDefaultUncaughtExceptionHandler(uncaughtExceptionHandler)
}
protected override def afterAll(): Unit = {
super.afterAll()
Thread.setDefaultUncaughtExceptionHandler(defaultUncaughtExceptionHandler)
}
override protected def afterEach(): Unit = {
super.afterEach()
uncaughtExceptionHandler.cleanStatus()
}
test("cat without SerDe") {
assume(TestUtils.testCommandAvailable("/bin/bash"))
val rowsDf = Seq("a", "b", "c").map(Tuple1.apply).toDF("a")
checkAnswer(
rowsDf,
(child: SparkPlan) => new ScriptTransformationExec(
input = Seq(rowsDf.col("a").expr),
script = "cat",
output = Seq(AttributeReference("a", StringType)()),
child = child,
ioschema = noSerdeIOSchema
),
rowsDf.collect())
assert(uncaughtExceptionHandler.exception.isEmpty)
}
test("cat with LazySimpleSerDe") {
assume(TestUtils.testCommandAvailable("/bin/bash"))
val rowsDf = Seq("a", "b", "c").map(Tuple1.apply).toDF("a")
checkAnswer(
rowsDf,
(child: SparkPlan) => new ScriptTransformationExec(
input = Seq(rowsDf.col("a").expr),
script = "cat",
output = Seq(AttributeReference("a", StringType)()),
child = child,
ioschema = serdeIOSchema
),
rowsDf.collect())
assert(uncaughtExceptionHandler.exception.isEmpty)
}
test("script transformation should not swallow errors from upstream operators (no serde)") {
assume(TestUtils.testCommandAvailable("/bin/bash"))
val rowsDf = Seq("a", "b", "c").map(Tuple1.apply).toDF("a")
val e = intercept[TestFailedException] {
checkAnswer(
rowsDf,
(child: SparkPlan) => new ScriptTransformationExec(
input = Seq(rowsDf.col("a").expr),
script = "cat",
output = Seq(AttributeReference("a", StringType)()),
child = ExceptionInjectingOperator(child),
ioschema = noSerdeIOSchema
),
rowsDf.collect())
}
assert(e.getMessage().contains("intentional exception"))
// Before SPARK-25158, uncaughtExceptionHandler will catch IllegalArgumentException
assert(uncaughtExceptionHandler.exception.isEmpty)
}
test("script transformation should not swallow errors from upstream operators (with serde)") {
assume(TestUtils.testCommandAvailable("/bin/bash"))
val rowsDf = Seq("a", "b", "c").map(Tuple1.apply).toDF("a")
val e = intercept[TestFailedException] {
checkAnswer(
rowsDf,
(child: SparkPlan) => new ScriptTransformationExec(
input = Seq(rowsDf.col("a").expr),
script = "cat",
output = Seq(AttributeReference("a", StringType)()),
child = ExceptionInjectingOperator(child),
ioschema = serdeIOSchema
),
rowsDf.collect())
}
assert(e.getMessage().contains("intentional exception"))
// Before SPARK-25158, uncaughtExceptionHandler will catch IllegalArgumentException
assert(uncaughtExceptionHandler.exception.isEmpty)
}
test("SPARK-14400 script transformation should fail for bad script command") {
assume(TestUtils.testCommandAvailable("/bin/bash"))
val rowsDf = Seq("a", "b", "c").map(Tuple1.apply).toDF("a")
val e = intercept[SparkException] {
val plan =
new ScriptTransformationExec(
input = Seq(rowsDf.col("a").expr),
script = "some_non_existent_command",
output = Seq(AttributeReference("a", StringType)()),
child = rowsDf.queryExecution.sparkPlan,
ioschema = serdeIOSchema)
SparkPlanTest.executePlan(plan, hiveContext)
}
assert(e.getMessage.contains("Subprocess exited with status"))
assert(uncaughtExceptionHandler.exception.isEmpty)
}
test("SPARK-24339 verify the result after pruning the unused columns") {
val rowsDf = Seq(
("Bob", 16, 176),
("Alice", 32, 164),
("David", 60, 192),
("Amy", 24, 180)).toDF("name", "age", "height")
checkAnswer(
rowsDf,
(child: SparkPlan) => new ScriptTransformationExec(
input = Seq(rowsDf.col("name").expr),
script = "cat",
output = Seq(AttributeReference("name", StringType)()),
child = child,
ioschema = serdeIOSchema
),
rowsDf.select("name").collect())
assert(uncaughtExceptionHandler.exception.isEmpty)
}
}
private case class ExceptionInjectingOperator(child: SparkPlan) extends UnaryExecNode {
override protected def doExecute(): RDD[InternalRow] = {
child.execute().map { x =>
assert(TaskContext.get() != null) // Make sure that TaskContext is defined.
Thread.sleep(1000) // This sleep gives the external process time to start.
throw new IllegalArgumentException("intentional exception")
}
}
override def output: Seq[Attribute] = child.output
override def outputPartitioning: Partitioning = child.outputPartitioning
}
| Aegeaner/spark | sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/ScriptTransformationSuite.scala | Scala | apache-2.0 | 7,386 |
/*
* Copyright (c) 2014-2018 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.eval
import cats.laws._
import cats.laws.discipline._
import monix.execution.Callback
import monix.execution.ExecutionModel.AlwaysAsyncExecution
import monix.execution.exceptions.DummyException
import scala.util.{Failure, Success, Try}
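/** Verifies the synchronous vs. batched-asynchronous semantics of `Task.now` and
  * `Task.raiseError` under the default and `AlwaysAsyncExecution` execution models. */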
object TaskNowSuite extends BaseTestSuite {
test("Task.now should work synchronously") { implicit s =>
var wasTriggered = false
def trigger(): String = { wasTriggered = true; "result" }
val task = Task.now(trigger())
assert(wasTriggered, "wasTriggered")
val f = task.runToFuture
assertEquals(f.value, Some(Success("result")))
}
test("Task.now.runAsync: CancelableFuture should be synchronous for AlwaysAsyncExecution") { s =>
implicit val s2 = s.withExecutionModel(AlwaysAsyncExecution)
var wasTriggered = false
def trigger(): String = { wasTriggered = true; "result" }
val task = Task.now(trigger())
assert(wasTriggered, "wasTriggered")
val f = task.runToFuture
assertEquals(f.value, Some(Success("result")))
}
test("Task.now.runAsync(callback) should work synchronously") { implicit s =>
var result = Option.empty[Try[String]]
var wasTriggered = false
def trigger(): String = { wasTriggered = true; "result" }
val task = Task.now(trigger())
assert(wasTriggered, "wasTriggered")
task.runAsync(Callback.fromTry[String](r => result = Some(r)))
assertEquals(result, Some(Success("result")))
}
test("Task.now.runAsync(callback) should be asynchronous for AlwaysAsyncExecution") { s =>
implicit val s2 = s.withExecutionModel(AlwaysAsyncExecution)
var result = Option.empty[Try[String]]
var wasTriggered = false
def trigger(): String = { wasTriggered = true; "result" }
val task = Task.now(trigger())
assert(wasTriggered, "wasTriggered")
task.runAsync(Callback.fromTry[String](r => result = Some(r)))
assertEquals(result, None)
s2.tick()
assertEquals(result, Some(Success("result")))
}
test("Task.raiseError should work synchronously") { implicit s =>
var wasTriggered = false
val dummy = DummyException("dummy")
def trigger(): Throwable = { wasTriggered = true; dummy }
val task = Task.raiseError(trigger())
assert(wasTriggered, "wasTriggered")
val f = task.runToFuture
assertEquals(f.value, Some(Failure(dummy)))
}
test("Task.raiseError.runAsync: CancelableFuture should be synchronous for AlwaysAsyncExecution") { s =>
implicit val s2 = s.withExecutionModel(AlwaysAsyncExecution)
val dummy = DummyException("dummy")
var wasTriggered = false
def trigger(): Throwable = { wasTriggered = true; dummy }
val task = Task.raiseError[String](trigger())
assert(wasTriggered, "wasTriggered")
val f = task.runToFuture
assertEquals(f.value, Some(Failure(dummy)))
}
test("Task.raiseError.runAsync(callback) should work synchronously") { implicit s =>
var result = Option.empty[Try[String]]
val dummy = DummyException("dummy")
var wasTriggered = false
def trigger(): Throwable = { wasTriggered = true; dummy }
val task = Task.raiseError[String](trigger())
assert(wasTriggered, "wasTriggered")
task.runAsync(Callback.fromTry[String](r => result = Some(r)))
assertEquals(result, Some(Failure(dummy)))
}
test("Task.raiseError.runAsync(callback) should be asynchronous for AlwaysAsyncExecution") { s =>
implicit val s2 = s.withExecutionModel(AlwaysAsyncExecution)
val dummy = DummyException("dummy")
var result = Option.empty[Try[String]]
var wasTriggered = false
def trigger(): Throwable = { wasTriggered = true; dummy }
val task = Task.raiseError[String](trigger())
assert(wasTriggered, "wasTriggered")
task.runAsync(Callback.fromTry[String](r => result = Some(r)))
assertEquals(result, None)
s2.tick()
assertEquals(result, Some(Failure(dummy)))
}
test("Task.now.map should work") { implicit s =>
Coeval.now(1).map(_ + 1).value()
check1 { a: Int =>
Task.now(a).map(_ + 1) <-> Task.now(a + 1)
}
}
test("Task.raiseError.map should be the same as Task.raiseError") { implicit s =>
check {
val dummy = DummyException("dummy")
Task.raiseError[Int](dummy).map(_ + 1) <-> Task.raiseError(dummy)
}
}
test("Task.raiseError.flatMap should be the same as Task.flatMap") { implicit s =>
check {
val dummy = DummyException("dummy")
Task.raiseError[Int](dummy).flatMap(Task.now) <-> Task.raiseError(dummy)
}
}
test("Task.raiseError.flatMap should be protected") { implicit s =>
check {
val dummy = DummyException("dummy")
val err = DummyException("err")
Task.raiseError[Int](dummy).flatMap[Int](_ => throw err) <-> Task.raiseError(dummy)
}
}
test("Task.now.flatMap should protect against user code") { implicit s =>
val ex = DummyException("dummy")
val t = Task.now(1).flatMap[Int](_ => throw ex)
check(t <-> Task.raiseError(ex))
}
test("Task.now.flatMap should be tail recursive") { implicit s =>
def loop(n: Int, idx: Int): Task[Int] =
Task.now(idx).flatMap { a =>
if (idx < n) loop(n, idx + 1).map(_ + 1) else
Task.now(idx)
}
val iterations = s.executionModel.recommendedBatchSize * 20
val f = loop(iterations, 0).runToFuture
s.tickOne()
assertEquals(f.value, None)
s.tick()
assertEquals(f.value, Some(Success(iterations * 2)))
}
test("Task.now should not be cancelable") { implicit s =>
val t = Task.now(10)
val f = t.runToFuture
f.cancel()
s.tick()
assertEquals(f.value, Some(Success(10)))
}
test("Task.raiseError should not be cancelable") { implicit s =>
val dummy = DummyException("dummy")
val t = Task.raiseError(dummy)
val f = t.runToFuture
f.cancel()
s.tick()
assertEquals(f.value, Some(Failure(dummy)))
}
test("Task.now.coeval") { implicit s =>
val result = Task.now(100).runSyncStep
assertEquals(result, Right(100))
}
test("Task.raiseError.coeval") { implicit s =>
val dummy = DummyException("dummy")
val result = Task.raiseError(dummy).attempt.runSyncStep
assertEquals(result, Right(Left(dummy)))
}
}
| Wogan/monix | monix-eval/shared/src/test/scala/monix/eval/TaskNowSuite.scala | Scala | apache-2.0 | 6,893 |
package memnets.fx.fx3d
import scalafx.scene.paint._
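/** Attaches a default Phong material with a soft specular highlight to the Shape3D it is mixed into. */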
trait Phong { self: scalafx.scene.shape.Shape3D =>
val phong: PhongMaterial = new PhongMaterial {
specularColor = Color.web("#bbb")
specularPower = 60
}
material = phong
}
| MemoryNetworks/memnets | fx/src/main/scala/memnets/fx/fx3d/Phong.scala | Scala | apache-2.0 | 241 |
package org.eigengo.mirror
import akka.actor.ActorRefFactory
import spray.routing.{Route, Directives}
object StaticService extends Directives {
def route(implicit arf: ActorRefFactory): Route = {
getFromResourceDirectory("")
}
}
| eigengo/activator-mirror | src/main/scala/org/eigengo/mirror/StaticService.scala | Scala | apache-2.0 | 241 |
package models.tosca
import scalaz._
import Scalaz._
import scalaz.effect.IO
import scalaz.Validation
import scalaz.Validation.FlatMap._
import scalaz.NonEmptyList._
import scalaz.syntax.SemigroupOps
import cache._
import db._
import models.tosca._
import models.json.tosca._
import models.Constants._
import io.megam.auth.funnel.FunnelErrors._
import models.base._
import wash._
import utils.DateHelper
import io.megam.util.Time
import org.joda.time.{DateTime, DateTimeZone}
import org.joda.time.format.{DateTimeFormat,ISODateTimeFormat}
import io.megam.common.uid.UID
import net.liftweb.json._
import net.liftweb.json.scalaz.JsonScalaz._
import java.nio.charset.Charset
import com.datastax.driver.core.{ ResultSet, Row }
import com.websudos.phantom.dsl._
import scala.concurrent.{ Future => ScalaFuture }
import com.websudos.phantom.connectors.{ ContactPoint, KeySpaceDef }
import scala.concurrent.Await
import scala.concurrent.duration._
import com.websudos.phantom.iteratee.Iteratee
import controllers.stack.ImplicitJsonFormats
case class SensorsInput(account_id: String, sensor_type: String, assembly_id: String, assembly_name: String, assemblies_id: String,
node: String, system: String, status: String, source: String, message: String, audit_period_beginning: String,
audit_period_ending: String, audit_period_delta: String, metrics: models.tosca.MetricsList,
created_at: DateTime)
case class SensorsResult(id: String, account_id: String, sensor_type: String, assembly_id: String, assembly_name: String, assemblies_id: String,
node: String, system: String, status: String, source: String, message: String, audit_period_beginning: String,
audit_period_ending: String, audit_period_delta: String, metrics: models.tosca.MetricsList,
created_at: DateTime)
case class Metrics(metric_name: String, metric_value: String, metric_units: String, metric_type: String)
object SensorsResult {
def apply(id: String, account_id: String, sensor_type: String, assembly_id: String, assembly_name: String, assemblies_id: String,
node: String, system: String, status: String, source: String, message: String, audit_period_beginning: String,
audit_period_ending: String, audit_period_delta: String, metrics: models.tosca.MetricsList) =
new SensorsResult(id, account_id, sensor_type, assembly_id, assembly_name, assemblies_id,
node, system, status, source, message, audit_period_beginning, audit_period_ending, audit_period_delta,
metrics, DateHelper.now())
}
sealed class SensorsSacks extends CassandraTable[SensorsSacks, SensorsResult] with ImplicitJsonFormats {
object id extends StringColumn(this)
object account_id extends StringColumn(this) with PartitionKey[String]
object created_at extends DateTimeColumn(this) with PrimaryKey[DateTime]
object assembly_id extends StringColumn(this) with PrimaryKey[String]
object sensor_type extends StringColumn(this) with PrimaryKey[String]
object assembly_name extends StringColumn(this)
object assemblies_id extends StringColumn(this)
object node extends StringColumn(this)
object system extends StringColumn(this)
object status extends StringColumn(this)
object source extends StringColumn(this)
object message extends StringColumn(this)
object audit_period_beginning extends StringColumn(this)
object audit_period_ending extends StringColumn(this)
object audit_period_delta extends StringColumn(this)
object metrics extends JsonListColumn[SensorsSacks, SensorsResult, Metrics](this) {
override def fromJson(obj: String): Metrics = {
JsonParser.parse(obj).extract[Metrics]
}
override def toJson(obj: Metrics): String = {
compactRender(Extraction.decompose(obj))
}
}
def fromRow(row: Row): SensorsResult = {
SensorsResult(
id(row),
account_id(row),
sensor_type(row),
assembly_id(row),
assembly_name(row),
assemblies_id(row),
node(row),
system(row),
status(row),
source(row),
message(row),
audit_period_beginning(row),
audit_period_ending(row),
audit_period_delta(row),
metrics(row),
created_at(row))
}
}
abstract class ConcreteSensors extends SensorsSacks with RootConnector {
override lazy val tableName = "sensors"
override implicit def space: KeySpace = scyllaConnection.space
override implicit def session: Session = scyllaConnection.session
def insertNewRecord(se: SensorsResult): ValidationNel[Throwable, ResultSet] = {
val res = insert.value(_.id, se.id)
.value(_.account_id, se.account_id)
.value(_.sensor_type, se.sensor_type)
.value(_.assembly_id, se.assembly_id)
.value(_.assembly_name, se.assembly_name)
.value(_.assemblies_id, se.assemblies_id)
.value(_.node, se.node)
.value(_.system, se.system)
.value(_.status, se.status)
.value(_.source, se.source)
.value(_.message, se.message)
.value(_.audit_period_beginning, se.audit_period_beginning)
.value(_.audit_period_ending, se.audit_period_ending)
.value(_.audit_period_delta, se.audit_period_delta)
.value(_.metrics, se.metrics)
.value(_.created_at, se.created_at)
.future()
Await.result(res, 5.seconds).successNel
}
def deleteRecords(email: String): ValidationNel[Throwable, ResultSet] = {
val res = delete.where(_.account_id eqs email).future()
Await.result(res, 5.seconds).successNel
}
def deleteRecordsByAssembly(id: String, email: String): ValidationNel[Throwable, ResultSet] = {
val res = delete.where(_.account_id eqs email).and(_.assembly_id eqs id).future()
Await.result(res, 5.seconds).successNel
}
}
object Sensors extends ConcreteSensors {
private def mkSensorsSack(email: String, input: String): ValidationNel[Throwable, SensorsResult] = {
val ripNel: ValidationNel[Throwable, SensorsInput] = (Validation.fromTryCatchThrowable[SensorsInput, Throwable] {
parse(input).extract[SensorsInput]
} leftMap { t: Throwable => new MalformedBodyError(input, t.getMessage) }).toValidationNel //capture failure
for {
ses <- ripNel
uir <- (UID("sen").get leftMap { ut: NonEmptyList[Throwable] => ut })
} yield {
new SensorsResult(uir.get._1 + uir.get._2, ses.account_id, ses.sensor_type, ses.assembly_id, ses.assembly_name,
ses.assemblies_id, ses.node, ses.system, ses.status, ses.source, ses.message, ses.audit_period_beginning,
ses.audit_period_ending, ses.audit_period_delta, ses.metrics, DateHelper.now())
}
}
def create(email: String, input: String): ValidationNel[Throwable, Option[SensorsResult]] = {
for {
se <- (mkSensorsSack(email, input) leftMap { err: NonEmptyList[Throwable] => err })
set <- (insertNewRecord(se) leftMap { t: NonEmptyList[Throwable] => t })
} yield {
play.api.Logger.warn(("%s%s%-20s%s%s").format(Console.GREEN, Console.BOLD, "Sensors","|+| ✔", Console.RESET))
se.some
}
}
def delete(email: String): ValidationNel[Throwable, Option[SensorsResult]] = {
deleteRecords(email) match {
case Success(value) => Validation.success[Throwable, Option[SensorsResult]](none).toValidationNel
case Failure(err) => Validation.success[Throwable, Option[SensorsResult]](none).toValidationNel
}
}
def deleteByAssembly(id:String, email: String): ValidationNel[Throwable, Option[SensorsResult]] = {
deleteRecordsByAssembly(id, email) match {
case Success(value) => Validation.success[Throwable, Option[SensorsResult]](none).toValidationNel
case Failure(err) => Validation.success[Throwable, Option[SensorsResult]](none).toValidationNel
}
}
}
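
// Illustrative sketch (editorial addition, not part of the original sources):
// the JSON shape the `metrics` column round-trips through the
// JsonListColumn.fromJson/toJson overrides above. The metric values are
// hypothetical, and the implicit Formats are assumed to come from
// ImplicitJsonFormats, as in SensorsSacks.
object MetricsJsonSketch extends ImplicitJsonFormats {
  val sample = Metrics("cpu", "0.75", "percent", "gauge")
  // Serialize exactly as the column's toJson does.
  val asJson: String = compactRender(Extraction.decompose(sample))
  // Parse back exactly as the column's fromJson does.
  val back: Metrics = JsonParser.parse(asJson).extract[Metrics]
}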
| indykish/verticegateway | app/models/tosca/Sensors.scala | Scala | mit | 7,770 |
/**
* Copyright 2017 Interel
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package core3.test.specs.prop
import org.scalatest.prop.GeneratorDrivenPropertyChecks
import org.scalatest.{Matchers, fixture}
abstract class PropertySpec extends fixture.FlatSpec with Matchers with GeneratorDrivenPropertyChecks
| Interel-Group/core3 | src/test/scala/core3/test/specs/prop/PropertySpec.scala | Scala | apache-2.0 | 833 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.datasources.csv
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileStatus, Path}
import org.apache.hadoop.mapreduce._
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.csv.{CSVHeaderChecker, CSVOptions, UnivocityParser}
import org.apache.spark.sql.catalyst.expressions.ExprUtils
import org.apache.spark.sql.catalyst.util.CompressionCodecs
import org.apache.spark.sql.errors.QueryCompilationErrors
import org.apache.spark.sql.execution.datasources._
import org.apache.spark.sql.sources._
import org.apache.spark.sql.types._
import org.apache.spark.util.SerializableConfiguration
/**
* Provides access to CSV data from pure SQL statements.
*/
class CSVFileFormat extends TextBasedFileFormat with DataSourceRegister {
override def shortName(): String = "csv"
override def isSplitable(
sparkSession: SparkSession,
options: Map[String, String],
path: Path): Boolean = {
val parsedOptions = new CSVOptions(
options,
columnPruning = sparkSession.sessionState.conf.csvColumnPruning,
sparkSession.sessionState.conf.sessionLocalTimeZone)
val csvDataSource = CSVDataSource(parsedOptions)
csvDataSource.isSplitable && super.isSplitable(sparkSession, options, path)
}
override def inferSchema(
sparkSession: SparkSession,
options: Map[String, String],
files: Seq[FileStatus]): Option[StructType] = {
val parsedOptions = new CSVOptions(
options,
columnPruning = sparkSession.sessionState.conf.csvColumnPruning,
sparkSession.sessionState.conf.sessionLocalTimeZone)
CSVDataSource(parsedOptions).inferSchema(sparkSession, files, parsedOptions)
}
override def prepareWrite(
sparkSession: SparkSession,
job: Job,
options: Map[String, String],
dataSchema: StructType): OutputWriterFactory = {
val conf = job.getConfiguration
val csvOptions = new CSVOptions(
options,
columnPruning = sparkSession.sessionState.conf.csvColumnPruning,
sparkSession.sessionState.conf.sessionLocalTimeZone)
csvOptions.compressionCodec.foreach { codec =>
CompressionCodecs.setCodecConfiguration(conf, codec)
}
new OutputWriterFactory {
override def newInstance(
path: String,
dataSchema: StructType,
context: TaskAttemptContext): OutputWriter = {
new CsvOutputWriter(path, dataSchema, context, csvOptions)
}
override def getFileExtension(context: TaskAttemptContext): String = {
".csv" + CodecStreams.getCompressionExtension(context)
}
}
}
override def buildReader(
sparkSession: SparkSession,
dataSchema: StructType,
partitionSchema: StructType,
requiredSchema: StructType,
filters: Seq[Filter],
options: Map[String, String],
hadoopConf: Configuration): (PartitionedFile) => Iterator[InternalRow] = {
val broadcastedHadoopConf =
sparkSession.sparkContext.broadcast(new SerializableConfiguration(hadoopConf))
val parsedOptions = new CSVOptions(
options,
sparkSession.sessionState.conf.csvColumnPruning,
sparkSession.sessionState.conf.sessionLocalTimeZone,
sparkSession.sessionState.conf.columnNameOfCorruptRecord)
    // Check a field requirement for corrupt records here to throw an exception on the driver side
ExprUtils.verifyColumnNameOfCorruptRecord(dataSchema, parsedOptions.columnNameOfCorruptRecord)
if (requiredSchema.length == 1 &&
requiredSchema.head.name == parsedOptions.columnNameOfCorruptRecord) {
throw QueryCompilationErrors.queryFromRawFilesIncludeCorruptRecordColumnError()
}
val columnPruning = sparkSession.sessionState.conf.csvColumnPruning
(file: PartitionedFile) => {
val conf = broadcastedHadoopConf.value.value
val actualDataSchema = StructType(
dataSchema.filterNot(_.name == parsedOptions.columnNameOfCorruptRecord))
val actualRequiredSchema = StructType(
requiredSchema.filterNot(_.name == parsedOptions.columnNameOfCorruptRecord))
val parser = new UnivocityParser(
actualDataSchema,
actualRequiredSchema,
parsedOptions,
filters)
val schema = if (columnPruning) actualRequiredSchema else actualDataSchema
val isStartOfFile = file.start == 0
val headerChecker = new CSVHeaderChecker(
schema, parsedOptions, source = s"CSV file: ${file.filePath}", isStartOfFile)
CSVDataSource(parsedOptions).readFile(
conf,
file,
parser,
headerChecker,
requiredSchema)
}
}
override def toString: String = "CSV"
override def hashCode(): Int = getClass.hashCode()
override def equals(other: Any): Boolean = other.isInstanceOf[CSVFileFormat]
override def supportDataType(dataType: DataType): Boolean = dataType match {
case _: AtomicType => true
case udt: UserDefinedType[_] => supportDataType(udt.sqlType)
case _ => false
}
}
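
// Minimal usage sketch (editorial addition, not part of the Spark sources):
// the "csv" short name registered via DataSourceRegister above is what
// `spark.read.format("csv")` resolves to. The path and option below are
// illustrative placeholders.
private[csv] object CsvFileFormatUsageSketch {
  import org.apache.spark.sql.{DataFrame, SparkSession}

  def readCsv(spark: SparkSession): DataFrame =
    spark.read
      .format("csv")            // resolved through CSVFileFormat.shortName()
      .option("header", "true") // treat the first line as column names
      .load("/tmp/example.csv") // hypothetical input path
}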
| shaneknapp/spark | sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/csv/CSVFileFormat.scala | Scala | apache-2.0 | 5,893 |
/*
* Copyright 2015 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.play.health
import java.util.jar
import play.api.Play
import collection.JavaConversions._
trait Manifest {
import play.api.Play.current
protected def appName:String
lazy val contents: Map[String, String] = resources.foldLeft(Map.empty[String, String]) { (map, url) =>
val manifest = new java.util.jar.Manifest(url.openStream())
if (map.isEmpty && isApplicationManifest(manifest)) {
manifest.getMainAttributes.toMap.map {
t => t._1.toString -> t._2.toString
}
} else {
map
}
}
private val resources = Play.application.classloader.getResources("META-INF/MANIFEST.MF")
private def isApplicationManifest(manifest: jar.Manifest) =
appName == manifest.getMainAttributes.getValue("Implementation-Title")
}
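
// Minimal sketch (editorial addition): a concrete Manifest. `appName` must
// equal the Implementation-Title attribute of the application's jar manifest;
// "my-service" is a hypothetical value.
object MyServiceManifest extends Manifest {
  protected val appName: String = "my-service"
}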
| liquidarmour/play-health | app/uk/gov/hmrc/play/health/Manifest.scala | Scala | apache-2.0 | 1,390 |
package jmh
import org.openjdk.jmh.annotations._
import org.openjdk.jmh.infra._
import scala.offheap._
@State(Scope.Thread)
class RegionClose {
implicit val props = Region.Props(Pool())
var r: Region = _
@Param(scala.Array("1024", "2048", "4096"))
var allocatedPages: Int = _
@Setup(Level.Invocation)
def setup(): Unit = {
r = Region.open
for (_ <- 1 to allocatedPages)
r.allocate(props.pool.pageSize)
}
@Benchmark
def close = r.close()
}
@State(Scope.Thread)
class RegionOpen {
implicit val props = Region.Props(Pool())
var r: Region = _
@TearDown(Level.Invocation)
def tearDown(): Unit = r.close
@Benchmark
def open = {
r = Region.open
r
}
}
@State(Scope.Thread)
class RegionAllocateCurrent {
implicit val props = Region.Props(Pool())
var r: Region = _
@Setup(Level.Invocation)
def setup(): Unit =
r = Region.open
@TearDown(Level.Invocation)
def tearDown(): Unit = r.close()
@Benchmark
def allocate = r.allocate(16L)
}
@State(Scope.Thread)
class RegionAllocateNext {
implicit val props = Region.Props(Pool())
var r: Region = _
@Setup(Level.Invocation)
def setup(): Unit = {
r = Region.open
r.allocate(props.pool.pageSize)
}
@TearDown(Level.Invocation)
def tearDown(): Unit = r.close()
@Benchmark
def allocate = r.allocate(16L)
}
| arosenberger/scala-offheap | jmh/src/main/scala/Region.scala | Scala | bsd-3-clause | 1,350 |
package maker.utils
import java.text.SimpleDateFormat
import java.util.Date
case class Stopwatch(
startTime : Long = System.nanoTime,
name : String = "",
private var snapshots_ : Map[Any, Long] = Map()
){
private val START = "START"
private val END = "END"
def takeSnapshot(key : Any) = {snapshots_ = snapshots_ + (key -> currentTime); this}
def startInterval(name : String) = takeSnapshot((name, START))
def endInterval(name : String) = takeSnapshot((name, END))
def snapshots = Map[Any, Long]() ++ snapshots_
def snapshotTime(key : Any) : Option[Long] = snapshots.get(key)
def intervalTime(name : String) = (snapshotTime((name, START)), snapshotTime((name, END))) match {
case (Some(t1), Some(t2)) => Some(t2 - t1)
case _ => None
}
def intervalStartAndEndTime(name : String) = (snapshotTime((name, START)), snapshotTime((name, END))) match {
case (Some(t1), Some(t2)) => Some((t1, t2))
case _ => None
}
def currentTime() = System.nanoTime
def nanos() = currentTime - startTime
def ms() : Long = (nanos) / 1000000
def s() : Long = ms / 1000
def toStringSeconds = s() + "(s)"
  override def toString : String = name + " " + Stopwatch.milliToHumanString(ms())
}
object Stopwatch {
val global = new Stopwatch
def time[A](name : String)(f: =>A) : A = {
val stopwatch = new Stopwatch(name = name)
val result = f
println("Time: " + stopwatch)
result
}
def time[A](f: =>A) : A = time("")(f)
def milliToHumanString(milli:Long):String = {
if (milli < 1000) {
milli + "(ms)"
} else if (milli < 60*1000) {
(milli / 1000) + "(s) " + (milli%1000) + "(ms)"
} else {
(milli / (60*1000)) + "(m) " + ((milli/1000)%60) + "(s)"
}
}
private val Format = new SimpleDateFormat("HH:mm.ss")
def milliToTimeString(milli:Long) = {
Format.format(new Date(milli))
}
def timeWithInfo[T](f: =>T) = {
val stopWatch = new Stopwatch
val result = f
(TimingInfo(stopWatch.startTime, stopWatch.currentTime), result)
}
}
case class TimingInfo(startTime:Long, endTime:Long) {
val timeTaken = (endTime - startTime) / 1000000
val timeTakenInMilliSeconds : Double = (endTime - startTime) / 1000000.0
}
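
// Usage sketch (editorial addition): composing the interval helpers above.
// The interval name "parse" is illustrative; intervalTime is only defined
// once both the START and END snapshots have been taken.
object StopwatchUsageSketch {
  def demo(): Unit = {
    val sw = Stopwatch(name = "demo")
    sw.startInterval("parse")
    Thread.sleep(5) // stand-in for real work under measurement
    sw.endInterval("parse")
    sw.intervalTime("parse").foreach { nanos =>
      println(s"parse took ${Stopwatch.milliToHumanString(nanos / 1000000)}")
    }
  }
}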
| cage433/maker | maker/src/maker/utils/Stopwatch.scala | Scala | bsd-2-clause | 2,225 |
/*
* Copyright (c) 2016 SnappyData, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License. See accompanying
* LICENSE file.
*/
package io.snappydata.core
import scala.reflect.io.Path
import org.apache.spark.SparkConf
/**
* Test data and test context for Snappy store tests
*/
case class TestData(key1: Int, value: String)
case class TestData2(key1: Int, value: String, ref: Int)
case class Data(col1: Int, col2: Int, col3: Int)
case class Data1(pk: Int, sk: String)
case class Data2(pk: Int, Year: Int)
case class Data3(pk1: Int, sk: String , pk2 : Int)
case class Data4(sk: String , pk1: Int, pk2 : Int)
case class RefData(ref: Int, description: String)
case class TRIPDATA(
MEDALLION: String,
HACK_LICENSE: String,
VENDOR_ID: String,
RATE_CODE: Integer,
STORE_AND_FWD_FLAG: String,
PICKUP_DATETIME: String,
DROPOFF_DATETIME: String,
PASSENGER_COUNT: Integer,
TRIP_TIME_IN_SECS: Integer,
TRIP_DISTANCE: Double,
PICKUP_LONGITUDE: Double,
PICKUP_LATITUDE: Double,
DROPOFF_LONGITUDE: Double,
DROPOFF_LATITUDE: Double
)
object FileCleaner {
def deletePath(path: String): Boolean = {
val file = Path(path)
file.exists && file.deleteRecursively()
}
def cleanStoreFiles(): Unit = {
deletePath("./metastore_db")
deletePath("./warehouse")
Path(".").walkFilter { f =>
f.name.startsWith("BACKUPGFXD-DEFAULT-DISKSTORE") ||
(f.name.startsWith("locator") && f.name.endsWith(".dat"))
}.foreach(_.deleteRecursively())
deletePath("./datadictionary")
}
}
/** Default SparkConf used for local testing. */
object LocalSparkConf {
def newConf(addOn: (SparkConf) => SparkConf = null): SparkConf = {
val conf = new SparkConf().
setIfMissing("spark.master", "local[4]").
setAppName(getClass.getName)
// conf.set(io.snappydata.Property.ColumnBatchSize.name, "64")
if (addOn != null) {
addOn(conf)
}
conf
}
}
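
// Usage sketch (editorial addition): building a local SparkContext for a test
// with an add-on that disables the UI; both values are illustrative.
object LocalSparkConfUsageSketch {
  import org.apache.spark.SparkContext

  def newLocalContext(): SparkContext =
    new SparkContext(LocalSparkConf.newConf(_.set("spark.ui.enabled", "false")))
}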
| vjr/snappydata | core/src/test/scala/io/snappydata/core/LocalTestData.scala | Scala | apache-2.0 | 2,488 |
package system.helpers
import java.util.UUID
import _root_.play.api.libs.json.Reads._
import _root_.play.api.libs.json._
import models.Helpers.Columns
import slick.driver.MySQLDriver.api._
/**
* Represents a specific, identifiable application resource.
*/
trait Resource {
/**
* All resources must have a unique identifier
* @return a UUID
*/
def id: UUID
}
/**
* Represents a container or collection of [[Resource]]s. In essence, this allows requesting authorization
* on a particular resource in the collection.
* @tparam R The [[Resource]] of this collection
* @tparam T Type bound used for the TableQuery to ensure the table has an ID column
*/
trait ResourceCollection[T <: Table[R] with Columns.Id[R], R <: Resource] {
/**
* This collection's [[TableQuery]]
* @return A [[TableQuery]]
*/
def tableQuery: TableQuery[T]
/**
* An implicit writer to convert this [[R]] to JSON
* @return A [[Writes]] [[R]]
*/
implicit def writes: Writes[R]
/**
* A set of validators which are used in [[validateArguments]]
* @return A [[Set]] of tuples of (Field Name, Required, Set of validation requirements)
*/
def validators: Set[(String, Boolean, Set[JsValue => Option[Int]])]
/**
* Creates a [[R]] with the given arguments
* @param arguments A key-value argument pair
* @return An optional [[R]]
*/
def create(arguments: Map[String, JsValue]): Option[R] = {
val uuid =
system.helpers.uuid
if (
SlickHelper.queryResult(
tableQuery += creator(uuid, arguments)
) > 0
)
SlickHelper.optionalFindById[T, R](tableQuery, uuid)
else
None
}
/**
* Retrieves the [[R]] with the given ID
* @param id @see [[Resource.id]]
* @return An optional [[R]] if one is found
*/
def read(id: UUID): Option[R] =
SlickHelper.optionalFindById[T, R](tableQuery, id)
/**
* Deletes the [[system.helpers.Resource]] with the given [[system.helpers.Resource.id]]
* @param id @see [[system.helpers.Resource.id]]
* @return true if successful, false otherwise
*/
def delete(id: UUID): Boolean =
SlickHelper.queryResult((tableQuery filter (_.id === id)).delete) > 0
/**
* Given a row [[R]], updates the corresponding values in the given arguments
* Assume that the data is valid.
* @param row A [[R]]
* @param arguments A map containing values to be updated
* @return A new [[R]]
*/
def updater(row: R,
arguments: Map[String, JsValue]): R
/**
* Given a map of field names to values, creates a new [[R]]
* @param uuid The UUID to use in the creation
* @param arguments A map containing values to be updated
* @return A new [[R]]
*/
def creator(uuid: UUID,
arguments: Map[String, JsValue]): R
/**
* Updates the [[R]] with the given ID, to the given arguments
* @param id @see [[Resource.id]]
* @param arguments A key-value argument pair
* @return true if successful, false otherwise
*/
def update(id: UUID,
arguments: Map[String, JsValue]): Boolean =
read(id)
.map(
row =>
SlickHelper.queryResult(
tableQuery
.filter(_.id === id)
.update(updater(row, arguments))
)
)
.fold(false)(_ > 0)
/**
* Dictates if the user with the given ID is allowed READ access to the resource with the given ID
* @param resourceId @see [[Resource.id]]
* @param userId @see [[models.User.id]]
* @param data The JSON object provided by the user containing data about the resource
* @return true if authorized, false if unauthorized
*/
def canRead(resourceId: Option[UUID],
userId: Option[UUID],
data: JsObject = Json.obj()): Boolean
/**
* Dictates if the user with the given ID is allowed MODIFY access to the resource with the given ID
* @param resourceId @see [[Resource.id]]
* @param userId @see [[models.User.id]]
* @param data The JSON object provided by the user containing data about the resource
* @return true if authorized, false if unauthorized
*/
def canModify(resourceId: Option[UUID],
userId: Option[UUID],
data: JsObject = Json.obj()): Boolean
/**
* Dictates if the user with the given ID is allowed DELETE access to the resource with the given ID
* @param resourceId @see [[Resource.id]]
* @param userId @see [[models.User.id]]
* @param data The JSON object provided by the user containing data about the resource
* @return true if authorized, false if unauthorized
*/
def canDelete(resourceId: Option[UUID],
userId: Option[UUID],
data: JsObject = Json.obj()): Boolean
/**
* Dictates if the user with the given ID is allowed CREATE access in this collection
   * @param resourceId @see [[Resource.id]]
   * @param userId @see [[models.User.id]]
* @param data The JSON object provided by the user containing data about the resource
* @return true if authorized, false if unauthorized
*/
def canCreate(resourceId: Option[UUID],
userId: Option[UUID],
data: JsObject = Json.obj()): Boolean
/**
* Checks the provided arguments, and validates the necessary properties
* @param arguments The arguments to be validated
   * @return An invalid property mapping from a property name to an error status
*/
def validateArguments(arguments: Map[String, JsValue]): Map[String, Int] =
PropertyValidators.validateArguments(arguments, validators)
}
object PropertyValidators {
/**
* Checks the provided arguments, and validates the necessary properties
* @param arguments The arguments to be validated
* @param validators @see [[ResourceCollection.validators]]
   * @return An invalid property mapping from a property name to an error status
*/
def validateArguments(arguments: Map[String, JsValue],
validators: Set[(String, Boolean, Set[(JsValue => Option[Int])])]): Map[String, Int] =
(validators map {
case (key: String, required: Boolean, rules: Set[(JsValue => Option[Int])]) =>
key -> PropertyValidators.validate(key, arguments, required, rules)
}).toMap collect {
case (key, Some(value)) =>
key -> value
}
/**
* Validates the given key against the given arguments, using the given set of rules
* @param key The key in the arguments map
* @param arguments The list of arguments containing the key
* @param required Dictates if the property is required
* @param rules The rules to match against the argument
* @return An optional error code
*/
def validate(key: String,
arguments: Map[String, JsValue],
required: Boolean,
rules: Set[JsValue => Option[Int]]): Option[Int] =
arguments.get(key)
.fold(
if (required)
Some(PropertyErrorCodes.NO_VALUE)
else
None
)(a => rules.foldLeft[Option[Int]](None) {
case (None, rule: (JsValue => Option[Int])) =>
rule(a)
case (Some(x), _) =>
Some(x)
})
/**
* Validates a name field
* @param s The given input
* @return An optional error code
*/
def name(s: JsValue): Option[Int] =
s.validate(__.read[JsString])
.fold(
_ => Some(PropertyErrorCodes.INVALID_TYPE),
_ => s.validate[String](__.read(minLength[String](2)))
.fold(
_ => Some(PropertyErrorCodes.TOO_SHORT),
_ => s.validate[String](maxLength[String](30))
.fold(
_ => Some(PropertyErrorCodes.TOO_LONG), {
case namePattern(_*) =>
None
case _ =>
Some(PropertyErrorCodes.NOT_NAME)
}
)
)
)
/**
* Validates a title field
* @param s The given input
* @return An optional error code
*/
def title(s: JsValue): Option[Int] =
s.validate(__.read[JsString])
.fold(
_ => Some(PropertyErrorCodes.INVALID_TYPE),
_ => s.validate[String](__.read(minLength[String](2)))
.fold(
_ => Some(PropertyErrorCodes.TOO_SHORT),
_ => s.validate[String](maxLength[String](100))
.fold(
_ => Some(PropertyErrorCodes.TOO_LONG), {
case namePattern(_*) =>
None
case _ =>
Some(PropertyErrorCodes.NOT_TITLE)
}
)
)
)
/**
* Validates a content field
* @param s The given input
* @return An optional error code
*/
def content(s: JsValue): Option[Int] =
s.validate(__.read[JsString])
.fold(
_ => Some(PropertyErrorCodes.INVALID_TYPE),
_ => s.validate[String](__.read(minLength[String](100)))
.fold(
_ => Some(PropertyErrorCodes.TOO_SHORT),
_ => None
)
)
/**
* Validates an email address
* @param s The given input
* @return An optional error code
*/
def email(s: JsValue): Option[Int] =
s.validate(__.read[JsString])
.fold(
_ => Some(PropertyErrorCodes.INVALID_TYPE),
_.validate(Reads.email)
.fold(
_ => Some(PropertyErrorCodes.INVALID_EMAIL),
_ => None
)
)
/**
* Validates a password field, requiring it to be a string,
   * be at least two characters long, be at most 255 characters long,
* contain at least one number,
* and at least one non-alpha numeric character
* @param s The given input
* @return An optional error code
*/
def password(s: JsValue): Option[Int] =
s.validate(__.read[JsString])
.fold(
_ => Some(PropertyErrorCodes.INVALID_TYPE),
_.validate[String](minLength[String](2))
.fold(
_ => Some(PropertyErrorCodes.TOO_SHORT),
_ => s.validate[String](maxLength[String](255))
.fold(
_ => Some(PropertyErrorCodes.TOO_LONG),
p => if ((nonAlphaNumericPattern findAllIn p).isEmpty || (numericPattern findAllIn p).isEmpty)
Some(PropertyErrorCodes.NOT_COMPLEX_ENOUGH)
else
None
)
)
)
/**
* Validates an integer field
* @param s The given input
* @return An optional error code
*/
def integer(s: JsValue): Option[Int] =
s.validate(__.read[Int])
.fold(
_ => Some(PropertyErrorCodes.INVALID_TYPE),
_ => None
)
/**
* Validates a score field, meaning the integer is between 0 and 100
* @param s The given input
* @return An optional error code
*/
def score(s: JsValue): Option[Int] =
integer(s)
.fold(
s.validate(__.read(min[Int](0)))
.fold(
_ => Some(PropertyErrorCodes.TOO_SMALL),
_ => s.validate(__.read(max[Int](100)))
.fold(
                  _ => Some(PropertyErrorCodes.TOO_LARGE),
                  _ => None)
)
)(Some(_))
/**
* Validates a UUID4
* @param s The given input
* @return An optional error code
*/
def uuid4(s: JsValue): Option[Int] =
s.validate(__.read[String])
.fold(
_ => Some(PropertyErrorCodes.INVALID_TYPE), {
case uuid4Pattern(_*) =>
None
case _ =>
Some(PropertyErrorCodes.NOT_UUID)
}
)
private val namePattern =
"""[A-Za-z-]*""".r
private val nonAlphaNumericPattern =
"""([^A-Za-z0-9])""".r
private val numericPattern =
"""([0-9])""".r
private val uuid4Pattern =
"""[0-9a-f\\-]{36}""".r
object PropertyErrorCodes {
val NO_VALUE = 0
val INVALID_TYPE = 1
val TOO_SHORT = 2
val TOO_LONG = 3
val INVALID_EMAIL = 4
val INVALID_CHARACTERS = 5
val NOT_COMPLEX_ENOUGH = 6
val NOT_UUID = 7
val NOT_NAME = 8
val NOT_TITLE = 9
val TOO_SMALL = 10
val TOO_LARGE = 11
}
}
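
// Illustrative sketch (editorial addition): assembling a validator set in the
// shape expected by ResourceCollection.validators and checking arguments
// against it. The field names below are hypothetical.
object PropertyValidatorsUsageSketch {
  val rules: Set[(String, Boolean, Set[JsValue => Option[Int]])] =
    Set(
      ("name", true, Set(PropertyValidators.name _)),
      ("email", true, Set(PropertyValidators.email _)),
      ("score", false, Set(PropertyValidators.score _))
    )

  // Returns a map from field name to error code for every failed rule.
  def check(arguments: Map[String, JsValue]): Map[String, Int] =
    PropertyValidators.validateArguments(arguments, rules)
}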
| Xanho/xanho-api | app/system/helpers/Resource.scala | Scala | apache-2.0 | 12,173 |
package amora.backend.schema
import java.net.URLEncoder
import amora.converter.protocol._
object Schema {
def mkShortId(s: Schema): String = s match {
case Project(name) ⇒
name
case Artifact(owner, organization, name, version) ⇒
s"${mkShortId(owner)}/$organization/$name/$version"
case File(owner, name) ⇒
s"${mkShortId(owner)}/$name"
case Package(name, owner) ⇒
s"${mkShortId(owner)}/$name"
case Class(name, owner) ⇒
s"${mkShortId(owner)}/$name"
case AbstractClass(name, owner) ⇒
s"${mkShortId(owner)}/$name"
case Object(name, owner) ⇒
s"${mkShortId(owner)}/$name"
case Trait(name, owner) ⇒
s"${mkShortId(owner)}/$name"
case Def(name, owner) ⇒
s"${mkShortId(owner)}/$name"
case Val(name, owner) ⇒
s"${mkShortId(owner)}/$name"
case Var(name, owner) ⇒
s"${mkShortId(owner)}/$name"
case LazyVal(name, owner) ⇒
s"${mkShortId(owner)}/$name"
}
def mkId(s: Schema): String = s match {
case _: Project ⇒
s"http://amora.center/kb/amora/Project/${mkShortId(s)}"
case _: Artifact ⇒
s"http://amora.center/kb/amora/Artifact/${mkShortId(s)}"
case _: File ⇒
s"http://amora.center/kb/amora/File/${mkShortId(s)}"
case _: Package ⇒
s"http://amora.center/kb/amora/Package/${mkShortId(s)}"
case _: Class ⇒
s"http://amora.center/kb/amora/Class/${mkShortId(s)}"
case _: AbstractClass ⇒
s"http://amora.center/kb/amora/AbstractClass/${mkShortId(s)}"
case _: Object ⇒
s"http://amora.center/kb/amora/Object/${mkShortId(s)}"
case _: Trait ⇒
s"http://amora.center/kb/amora/Trait/${mkShortId(s)}"
case _: Def ⇒
s"http://amora.center/kb/amora/Def/${mkShortId(s)}"
case _: Val ⇒
s"http://amora.center/kb/amora/Val/${mkShortId(s)}"
case _: Var ⇒
s"http://amora.center/kb/amora/Var/${mkShortId(s)}"
case _: LazyVal ⇒
s"http://amora.center/kb/amora/LazyVal/${mkShortId(s)}"
}
def mkDefn(s: Schema): String = s match {
case _: Project ⇒
s"http://amora.center/kb/amora/Schema/Project"
case _: Artifact ⇒
s"http://amora.center/kb/amora/Schema/Artifact"
case _: File ⇒
s"http://amora.center/kb/amora/Schema/File"
case _: Package ⇒
s"http://amora.center/kb/amora/Schema/Package"
case _: Class ⇒
s"http://amora.center/kb/amora/Schema/Class"
case _: AbstractClass ⇒
s"http://amora.center/kb/amora/Schema/AbstractClass"
case _: Object ⇒
s"http://amora.center/kb/amora/Schema/Object"
case _: Trait ⇒
s"http://amora.center/kb/amora/Schema/Trait"
case _: Def ⇒
s"http://amora.center/kb/amora/Schema/Def"
case _: Val ⇒
s"http://amora.center/kb/amora/Schema/Val"
case _: Var ⇒
s"http://amora.center/kb/amora/Schema/Var"
case _: LazyVal ⇒
s"http://amora.center/kb/amora/Schema/LazyVal"
}
def mkSparqlUpdate(schemas: Seq[Schema]): String = {
val sb = new StringBuilder
def mk(s: Schema): String = s match {
case Project(name) ⇒
val id = mkId(s)
val defn = mkDefn(s)
sb.append(s"""| <$id> a <$defn/> .
| <$id> <$defn/name> "$name" .
|""".stripMargin)
id
case Artifact(owner, organization, name, version) ⇒
val oid = mk(owner)
val id = mkId(s)
val defn = mkDefn(s)
sb.append(s"""| <$id> a <$defn/> .
| <$id> <$defn/owner> <$oid> .
| <$id> <$defn/organization> "$organization" .
| <$id> <$defn/name> "$name" .
| <$id> <$defn/version> "$version" .
|""".stripMargin)
id
case File(owner, fname) ⇒
val oid = mk(owner)
val id = mkId(s)
val defn = mkDefn(s)
sb.append(s"""| <$id> a <$defn/> .
| <$id> <$defn/owner> <$oid> .
| <$id> <$defn/name> "$fname" .
|""".stripMargin)
id
case Package(name, parent) ⇒
val oid = mk(parent)
val id = mkId(s)
val defn = mkDefn(s)
sb.append(s"""| <$id> a <$defn/> .
| <$id> <$defn/owner> <$oid> .
| <$id> <$defn/name> "$name" .
|""".stripMargin)
id
case Class(name, parent) ⇒
val oid = mk(parent)
val id = mkId(s)
val defn = mkDefn(s)
sb.append(s"""| <$id> a <$defn/> .
| <$id> <$defn/owner> <$oid> .
| <$id> <$defn/name> "$name" .
|""".stripMargin)
id
case Def(name, parent) ⇒
val oid = mk(parent)
val id = mkId(s)
val defn = mkDefn(s)
sb.append(s"""| <$id> a <$defn/> .
| <$id> <$defn/owner> <$oid> .
| <$id> <$defn/name> "$name" .
|""".stripMargin)
id
case Val(name, parent) ⇒
val oid = mk(parent)
val id = mkId(s)
val defn = mkDefn(s)
sb.append(s"""| <$id> a <$defn/> .
| <$id> <$defn/owner> <$oid> .
| <$id> <$defn/name> "$name" .
|""".stripMargin)
id
case Var(name, parent) ⇒
val oid = mk(parent)
val id = mkId(s)
val defn = mkDefn(s)
sb.append(s"""| <$id> a <$defn/> .
| <$id> <$defn/owner> <$oid> .
| <$id> <$defn/name> "$name" .
|""".stripMargin)
id
case LazyVal(name, parent) ⇒
val oid = mk(parent)
val id = mkId(s)
val defn = mkDefn(s)
sb.append(s"""| <$id> a <$defn/> .
| <$id> <$defn/owner> <$oid> .
| <$id> <$defn/name> "$name" .
|""".stripMargin)
id
}
sb.append("INSERT DATA {\\n")
schemas foreach mk
sb.append("}")
sb.toString()
}
def mkTurtleString(schemas: Seq[Schema]): String = turtleBuilder {
(addPrefix, addData) ⇒
def mk(s: Schema): String = s match {
case Project(name) ⇒
val id = mkId(s)
val defn = mkDefn(s)
addPrefix("Project", defn+"/")
addData(id, "a", "Project:")
addData(id, s"Project:name", s""""$name"""")
id
case Artifact(owner, organization, name, version) ⇒
val oid = mk(owner)
val id = mkId(s)
val defn = mkDefn(s)
addPrefix("Artifact", defn+"/")
addData(id, "a", "Artifact:")
addData(id, s"Artifact:owner", s"<$oid>")
addData(id, s"Artifact:organization", s""""$organization"""")
addData(id, s"Artifact:name", s""""$name"""")
addData(id, s"Artifact:version", s""""$version"""")
id
case File(owner, name) ⇒
val oid = mk(owner)
val id = mkId(s)
val defn = mkDefn(s)
addPrefix("File", defn+"/")
addData(id, "a", "File:")
addData(id, s"File:owner", s"<$oid>")
addData(id, s"File:name", s""""$name"""")
id
case Package(name, parent) ⇒
val oid = mk(parent)
val id = mkId(s)
val defn = mkDefn(s)
addPrefix("Package", defn+"/")
addData(id, "a", "Package:")
addData(id, s"Package:owner", s"<$oid>")
addData(id, s"Package:name", s""""$name"""")
id
case Class(name, parent) ⇒
val oid = mk(parent)
val id = mkId(s)
val defn = mkDefn(s)
addPrefix("Class", defn+"/")
addData(id, "a", "Class:")
addData(id, s"Class:owner", s"<$oid>")
addData(id, s"Class:name", s""""$name"""")
id
case Def(name, parent) ⇒
val oid = mk(parent)
val id = mkId(s)
val defn = mkDefn(s)
addPrefix("Def", defn+"/")
addData(id, "a", "Def:")
addData(id, s"Def:owner", s"<$oid>")
addData(id, s"Def:name", s""""$name"""")
id
case Val(name, parent) ⇒
val oid = mk(parent)
val id = mkId(s)
val defn = mkDefn(s)
addPrefix("Val", defn+"/")
addData(id, "a", "Val:")
addData(id, s"Val:owner", s"<$oid>")
addData(id, s"Val:name", s""""$name"""")
id
case Var(name, parent) ⇒
val oid = mk(parent)
val id = mkId(s)
val defn = mkDefn(s)
addPrefix("Var", defn+"/")
addData(id, "a", "Var:")
addData(id, s"Var:owner", s"<$oid>")
addData(id, s"Var:name", s""""$name"""")
id
case LazyVal(name, parent) ⇒
val oid = mk(parent)
val id = mkId(s)
val defn = mkDefn(s)
addPrefix("LazyVal", defn+"/")
addData(id, "a", "LazyVal:")
addData(id, s"LazyVal:owner", s"<$oid>")
addData(id, s"LazyVal:name", s""""$name"""")
id
}
schemas foreach mk
}
def mkTurtleUpdate(hierarchies: Seq[Hierarchy]): String = turtleBuilder {
(addPrefix, addData) ⇒
def mkTpe(decl: Decl) = {
if (decl.attachments(Attachment.Lazy) && decl.attachments(Attachment.Val))
"LazyVal"
else if (decl.attachments(Attachment.Abstract) && decl.attachments(Attachment.Class))
"AbstractClass"
else
decl.attachments.collectFirst {
case Attachment.Class ⇒ "Class"
case Attachment.Object ⇒ "Object"
case Attachment.Trait ⇒ "Trait"
case Attachment.Package ⇒ "Package"
case Attachment.Def ⇒ "Def"
case Attachment.Val ⇒ "Val"
case Attachment.Var ⇒ "Var"
}.getOrElse("Decl")
}
def sourceFile(h: Hierarchy) =
h.attachments.collectFirst {
case Attachment.SourceFile(file) ⇒ file
}.getOrElse(throw new IllegalStateException(s"SourceFile attachment expected at `$h` at position `${h.position}` but there were only: ${h.attachments}"))
def mkFullPath(decl: Decl) = {
def findArtifact(schema: Schema): Artifact = schema match {
case a: Artifact ⇒ a
case p: Package ⇒ findArtifact(p.owner)
case f: File ⇒ findArtifact(f.owner)
case _ ⇒ ???
}
val schema = sourceFile(decl)
val shortArtifactId = Schema.mkShortId(findArtifact(schema))
val tpe = mkTpe(decl)
s"http://amora.center/kb/amora/$tpe/$shortArtifactId/${mkShortPath(decl)}"
}
def mkRefPath(ref: Ref): String = {
val declPath = ref.refToDecl match {
case d: Decl ⇒ mkFullPath(d)
case _ ⇒
???
}
val file = sourceFile(ref)
val shortFileId = mkShortId(file)
val ctorAppendix = if (ref.owner.owner.attachments(Attachment.Constructor)) "/ctor" else ""
s"$declPath/$shortFileId${uniqueRef(ref.position)}$ctorAppendix"
}
def mkScopePath(decl: Scope): String = {
def findArtifact(schema: Schema): Artifact = schema match {
case a: Artifact ⇒ a
case p: Package ⇒ findArtifact(p.owner)
case f: File ⇒ findArtifact(f.owner)
case _ ⇒ ???
}
val schema = sourceFile(decl)
val shortArtifactId = Schema.mkShortId(findArtifact(schema))
val tpe = "Scope"
val path = encode(decl.asString).replace('.', '/')
s"http://amora.center/kb/amora/$tpe/$shortArtifactId/$path"
}
def mkOwnerPath(h: Hierarchy, owner: Hierarchy) = {
val isTopLevelDecl = {
def isTopLevelDecl = h.attachments.exists(Set(Attachment.Class, Attachment.Trait, Attachment.Object))
def isPkg = owner.attachments(Attachment.Package)
isTopLevelDecl && isPkg
}
if (isTopLevelDecl)
mkId(sourceFile(h))
else owner match {
case owner: Decl ⇒ mkFullPath(owner)
case owner: Ref ⇒ mkRefPath(owner)
case owner: Scope ⇒ mkScopePath(owner)
}
}
def findNonScopeOwner[A](scope: Scope)(pf: PartialFunction[Hierarchy, A]): A = scope.owner match {
case scope: Scope ⇒
findNonScopeOwner(scope)(pf)
case owner ⇒
if (pf.isDefinedAt(owner))
pf(owner)
else
throw new IllegalStateException(s"Can't convert the owner `$owner` of scope `$scope` at position `${scope.position}`.")
}
def loop(h: Hierarchy): Unit = h match {
case Root ⇒
case decl @ Decl(name, owner) ⇒
val tpe = mkTpe(decl)
val path = mkFullPath(decl)
val schemaPath = s"http://amora.center/kb/amora/Schema/$tpe"
addPrefix(tpe, schemaPath+"/")
addData(path, "a", s"$tpe:")
addData(path, s"$tpe:name", s""""$name"""")
if (h.attachments(Attachment.Param)) {
addData(path, s"$tpe:flag", "<http://amora.center/kb/amora/Flag/param>")
}
if (h.attachments(Attachment.TypeParam)) {
addData(path, s"$tpe:flag", "<http://amora.center/kb/amora/Flag/tparam>")
}
if (h.attachments(Attachment.Constructor)) {
addData(path, s"$tpe:flag", "<http://amora.center/kb/amora/Flag/constructor>")
}
if (h.attachments(Attachment.Implicit)) {
addData(path, s"$tpe:flag", "<http://amora.center/kb/amora/Flag/implicit>")
}
decl.attachments.collect {
case Attachment.JvmSignature(signature) ⇒
addData(path, s"$tpe:jvmSignature", s""""$signature"""")
case Attachment.JvmClass(signature) ⇒
addData(path, s"$tpe:jvmClass", s""""$signature"""")
}
decl.position match {
case RangePosition(start, end) ⇒
addData(path, s"$tpe:posStart", start.toString)
addData(path, s"$tpe:posEnd", end.toString)
case _ ⇒
}
owner match {
case Root ⇒
// The owner of a package is an artifact but this can't be represented
// in the Hierarchy structure. Thus, we index this information separately.
if (!decl.attachments(Attachment.Package)) {
val ownerPath = mkId(sourceFile(decl))
addData(path, s"$tpe:owner", s"""<$ownerPath>""")
}
case owner ⇒
val ownerPath = mkOwnerPath(decl, owner)
addData(path, s"$tpe:owner", s"""<$ownerPath>""")
loop(owner)
}
decl.attachments.collectFirst {
case Attachment.CodeOrder(nr) ⇒
addData(path, s"$tpe:codeOrder", nr.toString)
}
case ref @ Ref(name, refToDecl, owner, calledOn) ⇒
val declPath = refToDecl match {
case d: Decl ⇒ mkFullPath(d)
case _ ⇒
???
}
val path = mkRefPath(ref)
val schemaPath = s"http://amora.center/kb/amora/Schema/Ref"
addPrefix("Ref", schemaPath+"/")
addData(path, "a", "Ref:")
addData(path, "Ref:name", s""""$name"""")
addData(path, "Ref:refToDecl", s"""<$declPath>""")
if (ref.attachments(Attachment.Repeated)) {
addData(path, "Ref:flag", "<http://amora.center/kb/amora/Flag/repeated>")
}
ref.position match {
case RangePosition(start, end) ⇒
addData(path, "Ref:posStart", start.toString)
addData(path, "Ref:posEnd", end.toString)
case _ ⇒
}
owner match {
case Root ⇒
val file = sourceFile(ref)
val ownerPath = mkDefn(file)
addData(path, "Ref:owner", s"""<$ownerPath>""")
case owner: HierarchyWithName ⇒
val ownerPath = mkOwnerPath(ref, owner)
addData(path, "Ref:owner", s"""<$ownerPath>""")
loop(owner)
case owner: Scope ⇒
findNonScopeOwner(owner) {
case h: HierarchyWithName ⇒
val ownerPath = mkOwnerPath(ref, h) + "/" + encode(owner.attachmentAsString)
addData(path, "Ref:owner", s"""<$ownerPath>""")
}
loop(owner.owner)
}
calledOn foreach {
case calledOn: Ref ⇒
val calledOnPath = mkRefPath(calledOn)
addData(path, "Ref:calledOn", s"""<$calledOnPath>""")
case calledOn: Decl ⇒
val calledOnPath = mkFullPath(calledOn)
addData(path, "Ref:calledOn", s"""<$calledOnPath>""")
case _ ⇒
???
}
ref.attachments.collect {
case Attachment.Order(nr) ⇒
addData(path, "Ref:order", nr.toString)
case Attachment.CodeOrder(nr) ⇒
addData(path, "Ref:codeOrder", nr.toString)
}
case scope: Scope ⇒
val path = findNonScopeOwner(scope) {
case decl: Decl ⇒
mkFullPath(decl) + "/" + encode(scope.attachmentAsString)
case ref: Ref ⇒
mkRefPath(ref) + "/" + encode(scope.attachmentAsString)
}
val schemaPath = "http://amora.center/kb/amora/Schema/Scope"
addPrefix("Scope", schemaPath+"/")
addData(path, "a", "Scope:")
scope.position match {
case RangePosition(start, end) ⇒
addData(path, "Scope:posStart", start.toString)
addData(path, "Scope:posEnd", end.toString)
case _ ⇒
}
findNonScopeOwner(scope) {
case decl: Decl ⇒
val ownerPath = mkFullPath(decl)
addData(path, "Scope:owner", s"""<$ownerPath>""")
case ref: Ref ⇒
val ownerPath = mkRefPath(ref)
addData(path, "Scope:owner", s"""<$ownerPath>""")
}
loop(scope.owner)
}
hierarchies foreach loop
}
def turtleBuilder(f: ((String, String) ⇒ Unit, (String, String, String) ⇒ Unit) ⇒ Unit) = {
var prefixe = Map[String, String]()
var data = Map[String, Map[String, String]]()
def addPrefix(name: String, url: String) = {
if (!prefixe.contains(name))
prefixe += name → url
}
def addData(url: String, k: String, v: String) = {
val turtleUrl =
if (url.contains(":") && !url.contains("://")) {
val prefix = url.takeWhile(_ != ':')
if (!prefixe.contains(prefix))
throw new IllegalArgumentException(s"Prefix `$prefix` not found.")
else
url
}
else
s"<$url>"
data.get(turtleUrl) match {
case Some(map) ⇒
data += turtleUrl → (map + k → v)
case None ⇒
data += turtleUrl → Map(k → v)
}
}
f(addPrefix, addData)
val stringOrdering = new Ordering[String] {
def compare(a: String, b: String) = String.CASE_INSENSITIVE_ORDER.compare(a, b)
}
val sb = new StringBuilder
val prefixLen = (if (prefixe.isEmpty) 0 else prefixe.keys.map(_.length).max) + 3
prefixe.toList.sortBy(_._1)(stringOrdering) foreach {
case (name, url) ⇒
sb append "@prefix " append name append ":" append " " * (prefixLen - name.length) append "<" append url append "> .\\n"
}
val len = (if (data.isEmpty) 0 else data.values.map(_.keys.map(_.length).max).max) + 3
data.toList.sortBy(_._1)(stringOrdering) foreach {
case (url, kv) ⇒
        sb append url append "\n"
kv.toList.sortBy(_._1)(stringOrdering) foreach {
case (k, v) ⇒
sb append " " append k append " " * (len - k.length) append v append " ;\\n"
}
sb append ".\\n"
}
sb.toString
}
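
  // Illustrative sketch (editorial addition): how turtleBuilder's two
  // callbacks cooperate. The prefix and triple below are hypothetical.
  private def turtleBuilderSketch: String = turtleBuilder { (addPrefix, addData) ⇒
    addPrefix("Schema", "http://amora.center/kb/amora/Schema/")
    addData("http://amora.center/kb/example", "a", "Schema:")
    addData("http://amora.center/kb/example", "Schema:name", "\"example\"")
  }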
def mkSparqlUpdate(schema: Schema, data: Seq[Hierarchy]): String = {
val sb = new StringBuilder
def mkTpe(decl: Decl) = {
if (decl.attachments(Attachment.Lazy) && decl.attachments(Attachment.Val))
"LazyVal"
else if (decl.attachments(Attachment.Abstract) && decl.attachments(Attachment.Class))
"AbstractClass"
else
decl.attachments.collectFirst {
case Attachment.Class ⇒ "Class"
case Attachment.Object ⇒ "Object"
case Attachment.Trait ⇒ "Trait"
case Attachment.Package ⇒ "Package"
case Attachment.Def ⇒ "Def"
case Attachment.Val ⇒ "Val"
case Attachment.Var ⇒ "Var"
}.getOrElse("Decl")
}
def mkFullPath(decl: Decl) = {
def findArtifact(schema: Schema): Artifact = schema match {
case a: Artifact ⇒ a
case p: Package ⇒ findArtifact(p.owner)
case f: File ⇒ findArtifact(f.owner)
case _ ⇒ ???
}
val a = findArtifact(schema)
val tpe = mkTpe(decl)
s"http://amora.center/kb/amora/$tpe/${Schema.mkShortId(a)}/${mkShortPath(decl)}"
}
def mkOwnerPath(h: Hierarchy, owner: Decl) = {
val isTopLevelDecl = {
def isTopLevelDecl = h.attachments.exists(Set(Attachment.Class, Attachment.Trait, Attachment.Object))
def isPkg = owner.attachments(Attachment.Package)
isTopLevelDecl && isPkg
}
if (isTopLevelDecl)
Schema.mkId(schema)
else
mkFullPath(owner)
}
def loop(h: Hierarchy): Unit = h match {
case Root ⇒
case decl @ Decl(name, owner) ⇒
val tpe = mkTpe(decl)
val path = mkFullPath(decl)
val schemaPath = s"http://amora.center/kb/amora/Schema/$tpe"
sb.append(s" <$path> a <$schemaPath/> .\\n")
sb.append(s""" <$path> <$schemaPath/name> "$name" .""" + "\\n")
if (h.attachments(Attachment.Param)) {
sb.append(s""" <$path> <$schemaPath/flag> <http://amora.center/kb/amora/Flag/param> .""" + "\\n")
}
if (h.attachments(Attachment.TypeParam)) {
sb.append(s""" <$path> <$schemaPath/flag> <http://amora.center/kb/amora/Flag/tparam> .""" + "\\n")
}
if (h.attachments(Attachment.Constructor)) {
sb.append(s""" <$path> <$schemaPath/flag> <http://amora.center/kb/amora/Flag/constructor> .""" + "\\n")
}
if (h.attachments(Attachment.Implicit)) {
sb.append(s""" <$path> <$schemaPath/flag> <http://amora.center/kb/amora/Flag/implicit> .""" + "\\n")
}
decl.attachments.collect {
case Attachment.JvmSignature(signature) ⇒
sb.append(s""" <$path> <$schemaPath/jvmSignature> "$signature" .""" + "\\n")
case Attachment.JvmClass(signature) ⇒
sb.append(s""" <$path> <$schemaPath/jvmClass> "$signature" .""" + "\\n")
}
decl.position match {
case RangePosition(start, end) ⇒
sb.append(s" <$path> <$schemaPath/posStart> $start .\\n")
sb.append(s" <$path> <$schemaPath/posEnd> $end .\\n")
case _ ⇒
}
owner match {
case Root ⇒
// The owner of a package is an artifact but this can't be represented
// in the Hierarchy structure. Thus, we index this information separately.
if (!decl.attachments(Attachment.Package)) {
val ownerPath = Schema.mkId(schema)
sb.append(s" <$path> <$schemaPath/owner> <$ownerPath> .\\n")
}
case owner: Decl ⇒
val ownerPath = mkOwnerPath(decl, owner)
sb.append(s" <$path> <$schemaPath/owner> <$ownerPath> .\\n")
loop(owner)
case _: Ref ⇒
case _: Scope ⇒
}
case ref @ Ref(name, refToDecl, owner, qualifier) ⇒
val declPath = refToDecl match {
case d: Decl ⇒ mkFullPath(d)
// TODO replace this with a real implementation
case _ ⇒ "???"
}
val path = s"$declPath/${Schema.mkShortId(schema)}${uniqueRef(ref.position)}"
val schemaPath = s"http://amora.center/kb/amora/Schema/Ref"
sb.append(s" <$path> a <$schemaPath/> .\\n")
sb.append(s""" <$path> <$schemaPath/name> "$name" .""" + "\\n")
sb.append(s" <$path> <$schemaPath/refToDecl> <$declPath> .\\n")
ref.position match {
case RangePosition(start, end) ⇒
sb.append(s" <$path> <$schemaPath/posStart> $start .\\n")
sb.append(s" <$path> <$schemaPath/posEnd> $end .\\n")
case _ ⇒
}
owner match {
case Root ⇒
val ownerPath = Schema.mkDefn(schema)
sb.append(s" <$path> <$schemaPath/owner> <$ownerPath> .\\n")
case owner: Decl ⇒
val ownerPath = mkOwnerPath(ref, owner)
sb.append(s" <$path> <$schemaPath/owner> <$ownerPath> .\\n")
loop(owner)
case _: Ref ⇒
case _: Scope ⇒
}
case _: Scope ⇒
}
sb.append("INSERT DATA {\\n")
data foreach loop
sb.append("}")
sb.toString
}
private def uniqueRef(pos: Position) = pos match {
case RangePosition(start, _) ⇒
s"/$start"
case _ ⇒
""
}
private def encode(str: String): String =
URLEncoder.encode(str, "UTF-8")
private def mkShortPath(decl: Decl) = {
val Decl(name, owner) = decl
val n = encode(name)
val ownerPath = owner match {
case Root ⇒
""
case _: Decl | _: Scope ⇒
encode(owner.asString).replace('.', '/')
case _: Ref ⇒
val path = encode(owner.asString).replace('.', '/')
s"$path${uniqueRef(owner.position)}"
}
val sig = decl.attachments.collectFirst {
case Attachment.JvmSignature(signature) ⇒ encode(signature)
}.getOrElse("")
val paramAtt = encode(decl.attachments.collectFirst {
case Attachment.Param ⇒ "<param>"
case Attachment.TypeParam ⇒ "<tparam>"
}.getOrElse(""))
val originPath = if (ownerPath.isEmpty) "" else ownerPath + "/"
val fullPath = s"$originPath$paramAtt$n$sig"
fullPath
}
}
| sschaef/tooling-research | converter/protocol/src/main/scala/amora/backend/schema/Schema.scala | Scala | mit | 25,772 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ui.jobs
import javax.servlet.http.HttpServletRequest
import scala.xml.{Node, NodeSeq}
import org.apache.spark.scheduler.Schedulable
import org.apache.spark.status.PoolData
import org.apache.spark.status.api.v1._
import org.apache.spark.ui.{UIUtils, WebUIPage}
/** Page showing list of all ongoing and recently finished stages and pools */
private[ui] class AllStagesPage(parent: StagesTab) extends WebUIPage("") {
private val sc = parent.sc
private def isFairScheduler = parent.isFairScheduler
def render(request: HttpServletRequest): Seq[Node] = {
val allStages = parent.store.stageList(null)
val activeStages = allStages.filter(_.status == StageStatus.ACTIVE)
val pendingStages = allStages.filter(_.status == StageStatus.PENDING)
val completedStages = allStages.filter(_.status == StageStatus.COMPLETE)
val failedStages = allStages.filter(_.status == StageStatus.FAILED).reverse
val numCompletedStages = completedStages.size
val numFailedStages = failedStages.size
val subPath = "stages"
val activeStagesTable =
new StageTableBase(parent.store, request, activeStages, "active", "activeStage",
parent.basePath, subPath, parent.isFairScheduler, parent.killEnabled, false)
val pendingStagesTable =
new StageTableBase(parent.store, request, pendingStages, "pending", "pendingStage",
parent.basePath, subPath, parent.isFairScheduler, false, false)
val completedStagesTable =
new StageTableBase(parent.store, request, completedStages, "completed", "completedStage",
parent.basePath, subPath, parent.isFairScheduler, false, false)
val failedStagesTable =
new StageTableBase(parent.store, request, failedStages, "failed", "failedStage",
parent.basePath, subPath, parent.isFairScheduler, false, true)
// For now, pool information is only accessible in live UIs
val pools = sc.map(_.getAllPools).getOrElse(Seq.empty[Schedulable]).map { pool =>
val uiPool = parent.store.asOption(parent.store.pool(pool.name)).getOrElse(
new PoolData(pool.name, Set()))
pool -> uiPool
}.toMap
val poolTable = new PoolTable(pools, parent)
val shouldShowActiveStages = activeStages.nonEmpty
val shouldShowPendingStages = pendingStages.nonEmpty
val shouldShowCompletedStages = completedStages.nonEmpty
val shouldShowFailedStages = failedStages.nonEmpty
val completedStageNumStr = if (numCompletedStages == completedStages.size) {
s"$numCompletedStages"
} else {
s"$numCompletedStages, only showing ${completedStages.size}"
}
val summary: NodeSeq =
<div>
<ul class="unstyled">
{
if (shouldShowActiveStages) {
<li>
<a href="#active"><strong>Active Stages:</strong></a>
{activeStages.size}
</li>
}
}
{
if (shouldShowPendingStages) {
<li>
<a href="#pending"><strong>Pending Stages:</strong></a>
{pendingStages.size}
</li>
}
}
{
if (shouldShowCompletedStages) {
<li id="completed-summary">
<a href="#completed"><strong>Completed Stages:</strong></a>
{completedStageNumStr}
</li>
}
}
{
if (shouldShowFailedStages) {
<li>
<a href="#failed"><strong>Failed Stages:</strong></a>
{numFailedStages}
</li>
}
}
</ul>
</div>
var content = summary ++
{
if (sc.isDefined && isFairScheduler) {
<h4>Fair Scheduler Pools ({pools.size})</h4> ++ poolTable.toNodeSeq
} else {
Seq.empty[Node]
}
}
if (shouldShowActiveStages) {
content ++= <h4 id="active">Active Stages ({activeStages.size})</h4> ++
activeStagesTable.toNodeSeq
}
if (shouldShowPendingStages) {
content ++= <h4 id="pending">Pending Stages ({pendingStages.size})</h4> ++
pendingStagesTable.toNodeSeq
}
if (shouldShowCompletedStages) {
content ++= <h4 id="completed">Completed Stages ({completedStageNumStr})</h4> ++
completedStagesTable.toNodeSeq
}
if (shouldShowFailedStages) {
content ++= <h4 id ="failed">Failed Stages ({numFailedStages})</h4> ++
failedStagesTable.toNodeSeq
}
UIUtils.headerSparkPage("Stages for All Jobs", content, parent)
}
}
| ron8hu/spark | core/src/main/scala/org/apache/spark/ui/jobs/AllStagesPage.scala | Scala | apache-2.0 | 5,379 |
package colang.ast.raw.expression
import colang.ast.raw.ParserImpl
import colang.{MappedStrategy, SourceCode}
/**
* Represents a function call expression.
* @param function function to call
* @param arguments function arguments
*/
case class FunctionCall(function: Expression, arguments: ArgumentList) extends Expression {
def source: SourceCode = function.source + arguments.source
}
object FunctionCall {
val strategy: ParserImpl.Strategy[PostfixOperator] = MappedStrategy(
ArgumentList.strategy,
(arguments: ArgumentList) => new PostfixOperator {
def apply = function => FunctionCall(function, arguments)
def source: SourceCode = arguments.source
})
}
| merkispavel/colang | src/main/scala/colang/ast/raw/expression/FunctionCall.scala | Scala | mit | 694 |