| column | dtype | stats |
| --- | --- | --- |
| lang | stringclasses | 2 values |
| license | stringclasses | 13 values |
| stderr | stringlengths | 0 to 343 |
| commit | stringlengths | 40 to 40 |
| returncode | int64 | 0 to 128 |
| repos | stringlengths | 6 to 87.7k |
| new_contents | stringlengths | 0 to 6.23M |
| new_file | stringlengths | 3 to 311 |
| old_contents | stringlengths | 0 to 6.23M |
| message | stringlengths | 6 to 9.1k |
| old_file | stringlengths | 3 to 311 |
| subject | stringlengths | 0 to 4k |
| git_diff | stringlengths | 0 to 6.31M |
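The columns above describe commit records: the repository list, commit hash and message, the file path and contents before (`old_*`) and after (`new_*`) the change, and a tokenized `git_diff`. As a rough sketch of how a table with this schema might be loaded and filtered, assuming a local parquet export (the file name below is a placeholder, not something given here):

```python
from datasets import load_dataset

# Hypothetical local export of this dataset; substitute the real path or hub id.
ds = load_dataset("parquet", data_files="commit_data.parquet", split="train")

# Keep only commits that applied cleanly and touched JavaScript files.
clean_js = ds.filter(lambda row: row["returncode"] == 0 and row["lang"] == "JavaScript")

for row in clean_js.select(range(min(3, len(clean_js)))):
    # Each record pairs the old and new file contents with the commit message and diff.
    print(row["commit"], row["old_file"], "->", row["new_file"])
    print(row["subject"])
```

The `returncode == 0` filter is only illustrative; the meaning of non-zero codes is not documented in this excerpt.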
JavaScript
mit
e283a77e1570c987e108bc2f1a39747902558c45
0
arose/ngl,j0kaso/ngl,fredludlow/ngl,fredludlow/ngl,fredludlow/ngl,arose/ngl,arose/ngl,arose/ngl,fredludlow/ngl,j0kaso/ngl,j0kaso/ngl
/** * @file Mouse Observer * @author Alexander Rose <[email protected]> * @private */ import { Vector2 } from "../../lib/three.es6.js"; import Signal from "../../lib/signals.es6.js"; import { defaults } from "../utils.js"; /** * @example * mouseObserver.signals.scroll.add( function( delta ){ ... } ); * * @typedef {Object} MouseSignals * @property {Signal<Integer, Integer>} moved - on move: deltaX, deltaY * @property {Signal<Number>} scrolled - on scroll: delta * @property {Signal<Integer, Integer>} dragged - on drag: deltaX, deltaY * @property {Signal} dropped - on drop * @property {Signal} clicked - on click * @property {Signal} hovered - on hover */ /** * Mouse observer */ class MouseObserver{ /** * @param {Element} domElement - the dom element to observe mouse events in * @param {Object} params - parameters object * @param {Integer} params.hoverTimeout - timeout until the {@link MouseSignals.hovered} * signal is fired, set to -1 to ignore hovering */ constructor( domElement, params ){ /** * Events emitted by the mouse observer * @type {MouseSignals} */ this.signals = { moved: new Signal(), scrolled: new Signal(), dragged: new Signal(), dropped: new Signal(), clicked: new Signal(), hovered: new Signal() }; var p = Object.assign( {}, params ); this.hoverTimeout = defaults( p.hoverTimeout, 50 ); this.domElement = domElement; /** * Position on page * @type {Vector2} */ this.position = new Vector2(); /** * Previous position on page * @type {Vector2} */ this.prevPosition = new Vector2(); /** * Position on page when clicked * @type {Vector2} */ this.down = new Vector2(); /** * Position on dom element * @type {Vector2} */ this.canvasPosition = new Vector2(); /** * Flag indicating if the mouse is moving * @type {Boolean} */ this.moving = false; /** * Flag indicating if the mouse is hovering * @type {Boolean} */ this.hovering = true; /** * Flag indicating if there was a scolling event * since the last mouse move * @type {Boolean} */ this.scrolled = false; /** * Timestamp of last mouse move * @type {Number} */ this.lastMoved = Infinity; /** * Indicates which mouse button was pressed: * 0: No button; 1: Left button; 2: Middle button; 3: Right button * @type {Integer} */ this.which = undefined; /** * Flag indicating if the mouse is pressed down * @type {Boolean} */ this.pressed = undefined; /** * Flag indicating if the alt key is pressed * @type {Boolean} */ this.altKey = undefined; /** * Flag indicating if the ctrl key is pressed * @type {Boolean} */ this.ctrlKey = undefined; /** * Flag indicating if the meta key is pressed * @type {Boolean} */ this.metaKey = undefined; /** * Flag indicating if the shift key is pressed * @type {Boolean} */ this.shiftKey = undefined; this._listen = this._listen.bind( this ); this._onMousewheel = this._onMousewheel.bind( this ); this._onMousemove = this._onMousemove.bind( this ); this._onMousedown = this._onMousedown.bind( this ); this._onMouseup = this._onMouseup.bind( this ); this._listen(); domElement.addEventListener( 'mousewheel', this._onMousewheel ); domElement.addEventListener( 'wheel', this._onMousewheel ); domElement.addEventListener( 'MozMousePixelScroll', this._onMousewheel ); domElement.addEventListener( 'mousemove', this._onMousemove ); domElement.addEventListener( 'mousedown', this._onMousedown ); domElement.addEventListener( 'mouseup', this._onMouseup ); domElement.addEventListener( 'contextmenu', this._onContextmenu ); } setParameters( params ){ var p = Object.assign( {}, params ); this.hoverTimeout = defaults( p.hoverTimeout, this.hoverTimeout ); } 
/** * listen to mouse actions * @emits {MouseSignals.hovered} when hovered * @return {undefined} */ _listen(){ if( performance.now() - this.lastMoved > this.hoverTimeout ){ this.moving = false; } if( this.scrolled || ( !this.moving && !this.hovering ) ){ this.scrolled = false; if( this.hoverTimeout !== -1 ){ this.hovering = true; this.signals.hovered.dispatch(); } } requestAnimationFrame( this.listen ); } /** * handle mouse scroll * @emits {MouseSignals.scrolled} when scrolled * @param {Event} event - mouse event * @return {undefined} */ _onMousewheel( event ){ event.preventDefault(); this._setKeys( event ); var delta = 0; if( event.wheelDelta ){ // WebKit / Opera / Explorer 9 delta = event.wheelDelta / 40; }else if( event.detail ){ // Firefox delta = - event.detail / 3; }else{ // Firefox or IE 11 delta = - event.deltaY / ( event.deltaMode ? 0.33 : 30 ); } this.signals.scrolled.dispatch( delta ); setTimeout( () => { this.scrolled = true; }, this.hoverTimeout ); } /** * handle mouse move * @emits {MouseSignals.moved} when moved * @emits {MouseSignals.dragged} when dragged * @param {Event} event - mouse event * @return {undefined} */ _onMousemove( event ){ event.preventDefault(); this._setKeys( event ); this.moving = true; this.hovering = false; this.lastMoved = performance.now(); this.prevPosition.copy( this.position ); this.position.set( event.layerX, event.layerY ); this._setCanvasPosition( event ); var x = this.prevPosition.x - this.position.x; var y = this.prevPosition.y - this.position.y; this.signals.moved.dispatch( x, y ); if( this.pressed ){ this.signals.dragged.dispatch( x, y ); } } _onMousedown( event ){ event.preventDefault(); this._setKeys( event ); this.moving = false; this.hovering = false; this.down.set( event.layerX, event.layerY ); this.which = event.which; this.pressed = true; this.setCanvasPosition( event ); } /** * handle mouse up * @emits {MouseSignals.clicked} when clicked * @emits {MouseSignals.dropped} when dropped * @param {Event} event - mouse event * @return {undefined} */ _onMouseup( event ){ event.preventDefault(); this._setKeys( event ); this.signals.clicked.dispatch(); // if( this.distance() > 3 || event.which === RightMouseButton ){ // this.signals.dropped.dispatch(); // } this.which = undefined; this.pressed = undefined; } _onContextmenu( event ){ event.preventDefault(); } _distance(){ return this.position.distanceTo( this.down ); } _setCanvasPosition( event ){ var box = this.domElement.getBoundingClientRect(); var offsetX = event.clientX - box.left; var offsetY = event.clientY - box.top; this.canvasPosition.set( offsetX, box.height - offsetY ); } _setKeys( event ){ this.altKey = event.altKey; this.ctrlKey = event.ctrlKey; this.metaKey = event.metaKey; this.shiftKey = event.shiftKey; } dispose(){ var domElement = this.domElement; domElement.removeEventListener( 'mousewheel', this._onMousewheel ); domElement.removeEventListener( 'wheel', this._onMousewheel ); domElement.removeEventListener( 'MozMousePixelScroll', this._onMousewheel ); domElement.removeEventListener( 'mousemove', this._onMousemove ); domElement.removeEventListener( 'mousedown', this._onMousedown ); domElement.removeEventListener( 'mouseup', this._onMouseup ); domElement.removeEventListener( 'contextmenu', this._onContextmenu ); } } export default MouseObserver;
src/stage/mouse-observer.js
/** * @file Mouse Observer * @author Alexander Rose <[email protected]> * @private */ import { Vector2 } from "../../lib/three.es6.js"; import Signal from "../../lib/signals.es6.js"; import { defaults } from "../utils.js"; /** * @example * mouseObserver.signals.scroll.add( function( delta ){ ... } ); * * @typedef {Object} MouseSignals * @property {Signal<Integer, Integer>} moved - on move: deltaX, deltaY * @property {Signal<Number>} scrolled - on scroll: delta * @property {Signal<Integer, Integer>} dragged - on drag: deltaX, deltaY * @property {Signal} dropped - on drop * @property {Signal} clicked - on click * @property {Signal} hovered - on hover */ /** * Mouse observer */ class MouseObserver{ /** * @param {Element} domElement - the dom element to observe mouse events in * @param {Object} params - parameters object * @param {Integer} params.hoverTimeout - timeout until the {@link MouseSignals.hovered} * signal is fired, set to -1 to ignore hovering */ constructor( domElement, params ){ /** * Events emitted by the mouse observer * @type {MouseSignals} */ this.signals = { moved: new Signal(), scrolled: new Signal(), dragged: new Signal(), dropped: new Signal(), clicked: new Signal(), hovered: new Signal() }; var p = Object.assign( {}, params ); this.hoverTimeout = defaults( p.hoverTimeout, 50 ); this.domElement = domElement; /** * Position on page * @type {Vector2} */ this.position = new Vector2(); /** * Previous position on page * @type {Vector2} */ this.prevPosition = new Vector2(); /** * Position on page when clicked * @type {Vector2} */ this.down = new Vector2(); /** * Position on dom element * @type {Vector2} */ this.canvasPosition = new Vector2(); /** * Flag indicating if the mouse is moving * @type {Boolean} */ this.moving = false; /** * Flag indicating if the mouse is hovering * @type {Boolean} */ this.hovering = true; /** * Flag indicating if there was a scolling event * since the last mouse move * @type {Boolean} */ this.scrolled = false; /** * Timestamp of last mouse move * @type {Number} */ this.lastMoved = Infinity; /** * Indicates which mouse button was pressed: * 0: No button; 1: Left button; 2: Middle button; 3: Right button * @type {Integer} */ this.which = undefined; /** * Flag indicating if the mouse is pressed down * @type {Boolean} */ this.pressed = undefined; /** * Flag indicating if the alt key is pressed * @type {Boolean} */ this.altKey = undefined; /** * Flag indicating if the ctrl key is pressed * @type {Boolean} */ this.ctrlKey = undefined; /** * Flag indicating if the meta key is pressed * @type {Boolean} */ this.metaKey = undefined; /** * Flag indicating if the shift key is pressed * @type {Boolean} */ this.shiftKey = undefined; this._listen = this._listen.bind( this ); this._onMousewheel = this._onMousewheel.bind( this ); this._onMousemove = this._onMousemove.bind( this ); this._onMousedown = this._onMousedown.bind( this ); this._onMouseup = this._onMouseup.bind( this ); this.listen(); domElement.addEventListener( 'mousewheel', this._onMousewheel ); domElement.addEventListener( 'wheel', this._onMousewheel ); domElement.addEventListener( 'MozMousePixelScroll', this._onMousewheel ); domElement.addEventListener( 'mousemove', this._onMousemove ); domElement.addEventListener( 'mousedown', this._onMousedown ); domElement.addEventListener( 'mouseup', this._onMouseup ); domElement.addEventListener( 'contextmenu', this._onContextmenu ); } setParameters( params ){ var p = Object.assign( {}, params ); this.hoverTimeout = defaults( p.hoverTimeout, this.hoverTimeout ); } 
/** * listen to mouse actions * @emits {MouseSignals.hovered} when hovered * @return {undefined} */ _listen(){ if( performance.now() - this.lastMoved > this.hoverTimeout ){ this.moving = false; } if( this.scrolled || ( !this.moving && !this.hovering ) ){ this.scrolled = false; if( this.hoverTimeout !== -1 ){ this.hovering = true; this.signals.hovered.dispatch(); } } requestAnimationFrame( this.listen ); } /** * handle mouse scroll * @emits {MouseSignals.scrolled} when scrolled * @param {Event} event - mouse event * @return {undefined} */ _onMousewheel( event ){ event.preventDefault(); this._setKeys( event ); var delta = 0; if( event.wheelDelta ){ // WebKit / Opera / Explorer 9 delta = event.wheelDelta / 40; }else if( event.detail ){ // Firefox delta = - event.detail / 3; }else{ // Firefox or IE 11 delta = - event.deltaY / ( event.deltaMode ? 0.33 : 30 ); } this.signals.scrolled.dispatch( delta ); setTimeout( () => { this.scrolled = true; }, this.hoverTimeout ); } /** * handle mouse move * @emits {MouseSignals.moved} when moved * @emits {MouseSignals.dragged} when dragged * @param {Event} event - mouse event * @return {undefined} */ _onMousemove( event ){ event.preventDefault(); this._setKeys( event ); this.moving = true; this.hovering = false; this.lastMoved = performance.now(); this.prevPosition.copy( this.position ); this.position.set( event.layerX, event.layerY ); this._setCanvasPosition( event ); var x = this.prevPosition.x - this.position.x; var y = this.prevPosition.y - this.position.y; this.signals.moved.dispatch( x, y ); if( this.pressed ){ this.signals.dragged.dispatch( x, y ); } } _onMousedown( event ){ event.preventDefault(); this._setKeys( event ); this.moving = false; this.hovering = false; this.down.set( event.layerX, event.layerY ); this.which = event.which; this.pressed = true; this.setCanvasPosition( event ); } /** * handle mouse up * @emits {MouseSignals.clicked} when clicked * @emits {MouseSignals.dropped} when dropped * @param {Event} event - mouse event * @return {undefined} */ _onMouseup( event ){ event.preventDefault(); this._setKeys( event ); this.signals.clicked.dispatch(); // if( this.distance() > 3 || event.which === RightMouseButton ){ // this.signals.dropped.dispatch(); // } this.which = undefined; this.pressed = undefined; } _onContextmenu( event ){ event.preventDefault(); } _distance(){ return this.position.distanceTo( this.down ); } _setCanvasPosition( event ){ var box = this.domElement.getBoundingClientRect(); var offsetX = event.clientX - box.left; var offsetY = event.clientY - box.top; this.canvasPosition.set( offsetX, box.height - offsetY ); } _setKeys( event ){ this.altKey = event.altKey; this.ctrlKey = event.ctrlKey; this.metaKey = event.metaKey; this.shiftKey = event.shiftKey; } dispose(){ var domElement = this.domElement; domElement.removeEventListener( 'mousewheel', this._onMousewheel ); domElement.removeEventListener( 'wheel', this._onMousewheel ); domElement.removeEventListener( 'MozMousePixelScroll', this._onMousewheel ); domElement.removeEventListener( 'mousemove', this._onMousemove ); domElement.removeEventListener( 'mousedown', this._onMousedown ); domElement.removeEventListener( 'mouseup', this._onMouseup ); domElement.removeEventListener( 'contextmenu', this._onContextmenu ); } } export default MouseObserver;
method name typo
src/stage/mouse-observer.js
method name typo
<ide><path>rc/stage/mouse-observer.js <ide> this._onMousedown = this._onMousedown.bind( this ); <ide> this._onMouseup = this._onMouseup.bind( this ); <ide> <del> this.listen(); <add> this._listen(); <ide> <ide> domElement.addEventListener( 'mousewheel', this._onMousewheel ); <ide> domElement.addEventListener( 'wheel', this._onMousewheel );
Java
epl-1.0
fd28c651ff03a79de402af41363de09b147f61fb
0
akurtakov/Pydev,fabioz/Pydev,rajul/Pydev,rgom/Pydev,akurtakov/Pydev,fabioz/Pydev,aptana/Pydev,akurtakov/Pydev,aptana/Pydev,RandallDW/Aruba_plugin,fabioz/Pydev,rajul/Pydev,rgom/Pydev,RandallDW/Aruba_plugin,fabioz/Pydev,rajul/Pydev,akurtakov/Pydev,rajul/Pydev,akurtakov/Pydev,aptana/Pydev,rgom/Pydev,fabioz/Pydev,RandallDW/Aruba_plugin,RandallDW/Aruba_plugin,rgom/Pydev,fabioz/Pydev,RandallDW/Aruba_plugin,rgom/Pydev,rajul/Pydev,rgom/Pydev,rajul/Pydev,RandallDW/Aruba_plugin,akurtakov/Pydev
package com.aptana.js.interactive_console.rhino; import java.io.OutputStream; import java.io.PrintStream; import java.util.ArrayList; import java.util.List; import java.util.concurrent.BlockingQueue; import java.util.concurrent.LinkedBlockingQueue; import org.mozilla.javascript.Context; import org.mozilla.javascript.ContextFactory; import org.mozilla.javascript.Function; import org.mozilla.javascript.Scriptable; import org.mozilla.javascript.ScriptableObject; import org.mozilla.javascript.Undefined; import org.mozilla.javascript.tools.shell.Global; import org.python.pydev.core.log.Log; /** * This is the basic implementation for the interpreter. Note that it's much more complicated than * it appears it should be because rhino commands must all be evaluated at the same thread, so, * it creates an internal thread for that purpose and uses a queue to communicate with it. * * Note that this also means that the interpreter must be properly disposed. * * @author Fabio Zadrozny */ public class RhinoInterpreter { private static final boolean DEBUG = false; /** * Command to be passed to the thread. */ private static interface ICommand { void evaluate(); } public static final Object NO_RESULT = new Object() { public String toString() { return "NO_RESULT"; }; }; /** * Command to signal that the thread/interpreter should be disposed. */ private class DisposeCommand implements ICommand { public void evaluate() { //Does nothing: just used to notify that the thread should be disposed. } @Override public String toString() { return "DisposeCommand"; } } /** * Some command that evaluates in the thread and returns some result. */ private abstract class AbstractResultCommand implements ICommand { private final Object lock = new Object(); /** * The result of the evaluation. */ private volatile Object result; /** * Some exception that happened during the evaluation. */ private volatile Throwable exception; public AbstractResultCommand() { this.result = NO_RESULT; } public void evaluate() { try { this.result = onEvaluate(); } catch (Throwable e) { this.exception = e; } synchronized (lock) { lock.notify(); } } /** * Subclasses must override to do the actual evaluation. */ protected abstract Object onEvaluate(); /** * If the evaluation throws an exception, rethrows it. */ public Object getResult() throws Exception { while (true) { if (DEBUG) { System.out.println("Locking to get result for: " + this); } try { synchronized (lock) { if (DEBUG) { System.out.println("Result for: " + this + ": " + result + " - Exception: " + this.exception); } if (this.exception != null) { if (this.exception instanceof Exception) { throw (Exception) this.exception; } else { throw new RuntimeException(this.exception); } } if (this.result != NO_RESULT) { return this.result; } lock.wait(); } } catch (InterruptedException e) { //ignore } } } } /** * Command to evaluate something. Clients get locked in the getResult() until the result is * available. */ private class EvalCommand extends AbstractResultCommand { private String source; private int line; public EvalCommand(String source, int line) { super(); this.source = source; this.line = line; } /** * Returns undefined or a string-representation of the evaluation. 
*/ protected Object onEvaluate() { Object eval; if (DEBUG) { try { eval = cx.evaluateString(global, source, "eval", line, null); } catch (RuntimeException e) { e.printStackTrace(); throw e; } } else { eval = cx.evaluateString(global, source, "eval", line, null); } if (!(eval instanceof Undefined)) { return Context.toString(eval); } return eval; //return undefined } @Override public String toString() { return "EvalCommand: " + source; } } /** * Evaluation context */ private Context cx; /** * Scope where the evaluation should happen */ private Global global; /** * Queue to help in synchronizing commands. */ private final BlockingQueue<ICommand> queue = new LinkedBlockingQueue<ICommand>(); private class RhinoInterpreterThread extends Thread { /** * Whether the thread should keep running. */ private volatile boolean finished = false; public RhinoInterpreterThread() { super(); setName("RhinoInterpreterThread"); } @Override public void run() { ContextFactory contextFactory = new ContextFactory(); RhinoInterpreter.this.global = new Global(); RhinoInterpreter.this.global.init(contextFactory); RhinoInterpreter.this.cx = contextFactory.enterContext(); while (!finished) { try { ICommand cmd = queue.take(); if (cmd instanceof DisposeCommand) { finished = true; } if (DEBUG) { System.out.println("About to evaluate: " + cmd); } try { cmd.evaluate(); } catch (Throwable e) { if (DEBUG) { System.out.println("Evaluation finished with ERROR: " + cmd); } Log.log(e); } if (DEBUG) { System.out.println("Finished evaluation: " + cmd); } } catch (InterruptedException e) { //ignore } } } public void setErr(final OutputStream stream) { global.setErr(new PrintStream(stream)); } public void setOut(final OutputStream stream) { global.setOut(new PrintStream(stream)); } public PrintStream getOut() { return global.getOut(); } public Object getErr() { return global.getErr(); } /** * Array with tuples with name, doc, args, type */ public List<Object[]> getCompletions(String text, String actTok) { ArrayList<Object[]> ret = new ArrayList<Object[]>(); Scriptable obj; int index = actTok.lastIndexOf('.'); if (index != -1) { String var = actTok.substring(0, index); actTok = actTok.substring(index + 1); try { Object eval = cx.evaluateString(global, var, "<eval>", 0, null); if (eval instanceof Scriptable) { obj = (Scriptable) eval; } else { return ret; //not something we can complete on. } } catch (Exception e) { return ret; //unable to get variable. } } else { obj = global; } Object val = obj.get(actTok, global); if (val instanceof Scriptable) { obj = (Scriptable) val; } Object[] ids; if (obj instanceof ScriptableObject) { ids = ((ScriptableObject) obj).getAllIds(); } else { ids = obj.getIds(); } //types: // function: 2 // local: 9 // see: IToken.TYPE_ String lastPart = actTok.toLowerCase(); for (int i = 0; i < ids.length; i++) { if (!(ids[i] instanceof String)) { continue; } String id = (String) ids[i]; if (id.toLowerCase().startsWith(lastPart)) { if (obj.get(id, obj) instanceof Function) { ret.add(new Object[] { id, "", "()", 2 }); } else { ret.add(new Object[] { id, "", "", 9 }); } } } return ret; } } private RhinoInterpreterThread rhinoThread; public RhinoInterpreter() { rhinoThread = new RhinoInterpreterThread(); rhinoThread.start(); } /** * Helper to add some command to the queue. 
* @param command * @return */ protected ICommand addCommand(ICommand command) { if (DEBUG) { System.out.println("Adding command: " + command); } boolean added = false; while (!added) { try { queue.put(command); added = true; } catch (InterruptedException e) { } } return command; } /** * Throws an exception if command was not properly evaluated. */ public Object eval(String source) throws Exception { return eval(source, 0); } /** * Throws an exception if command was not properly evaluated. */ public Object eval(String source, int line) throws Exception { EvalCommand command = new EvalCommand(source, line); addCommand(command); return command.getResult(); } public void setErr(final OutputStream stream) { addCommand(new ICommand() { public void evaluate() { rhinoThread.setErr(new PrintStream(stream)); } }); } public void setOut(final OutputStream stream) { addCommand(new ICommand() { public void evaluate() { rhinoThread.setOut(new PrintStream(stream)); } }); } public PrintStream getOut() { AbstractResultCommand cmd = new AbstractResultCommand() { @Override protected Object onEvaluate() { return rhinoThread.getOut(); } @Override public String toString() { return "AbstractResultCommand:getOut()"; } }; addCommand(cmd); try { return (PrintStream) cmd.getResult(); } catch (Exception e) { throw new RuntimeException(e); } } public void dispose() { addCommand(new DisposeCommand()); } public PrintStream getErr() { AbstractResultCommand cmd = new AbstractResultCommand() { @Override protected Object onEvaluate() { return rhinoThread.getErr(); } @Override public String toString() { return "AbstractResultCommand:getErr()"; } }; addCommand(cmd); try { return (PrintStream) cmd.getResult(); } catch (Exception e) { throw new RuntimeException(e); } } public Object getDescription(String evalStr) throws Exception { AbstractResultCommand cmd = new EvalCommand(evalStr, 0) { @Override protected Object onEvaluate() { Object o = super.onEvaluate(); return Context.toString(o); } @Override public String toString() { return "EvalCommand:getDescription()"; } }; addCommand(cmd); return cmd.getResult(); } @SuppressWarnings("unchecked") public List<Object[]> getCompletions(final String text, final String actTok) { AbstractResultCommand cmd = new AbstractResultCommand() { @Override protected Object onEvaluate() { return rhinoThread.getCompletions(text, actTok); } @Override public String toString() { return "AbstractResultCommand:getCompletions()"; } }; addCommand(cmd); try { return (List<Object[]>) cmd.getResult(); } catch (Exception e) { throw new RuntimeException(e); } } }
plugins/com.aptana.js.interactive_console/src_rhino_console/com/aptana/js/interactive_console/rhino/RhinoInterpreter.java
package com.aptana.js.interactive_console.rhino; import java.io.OutputStream; import java.io.PrintStream; import java.util.ArrayList; import java.util.List; import java.util.concurrent.BlockingQueue; import java.util.concurrent.LinkedBlockingQueue; import org.mozilla.javascript.Context; import org.mozilla.javascript.ContextFactory; import org.mozilla.javascript.Function; import org.mozilla.javascript.Scriptable; import org.mozilla.javascript.ScriptableObject; import org.mozilla.javascript.Undefined; import org.mozilla.javascript.tools.shell.Global; import org.python.pydev.core.log.Log; /** * This is the basic implementation for the interpreter. Note that it's much more complicated than * it appears it should be because rhino commands must all be evaluated at the same thread, so, * it creates an internal thread for that purpose and uses a queue to synchronize things with it. * * Note that this also means that the interpreter must be properly disposed. * * @author Fabio Zadrozny */ public class RhinoInterpreter { private static final boolean DEBUG = false; /** * Command to be passed to the thread. */ private static interface ICommand { void evaluate(); } public static final Object NO_RESULT = new Object() { public String toString() { return "NO_RESULT"; }; }; /** * Command to signal that the thread/interpreter should be disposed. */ private class DisposeCommand implements ICommand { public void evaluate() { //Does nothing: just used to notify that the thread should be disposed. } @Override public String toString() { return "DisposeCommand"; } } /** * Some command that evaluates in the thread and returns some result. */ private abstract class AbstractResultCommand implements ICommand { private final Object lock = new Object(); /** * The result of the evaluation. */ private volatile Object result; /** * Some exception that happened during the evaluation. */ private volatile Throwable exception; public AbstractResultCommand() { this.result = NO_RESULT; } public void evaluate() { try { this.result = onEvaluate(); } catch (Throwable e) { this.exception = e; } synchronized (lock) { lock.notify(); } } /** * Subclasses must override to do the actual evaluation. */ protected abstract Object onEvaluate(); /** * If the evaluation throws an exception, rethrows it. */ public Object getResult() throws Exception { while (true) { if (DEBUG) { System.out.println("Locking to get result for: " + this); } try { synchronized (lock) { if (DEBUG) { System.out.println("Result for: " + this + ": " + result + " - Exception: " + this.exception); } if (this.exception != null) { if (this.exception instanceof Exception) { throw (Exception) this.exception; } else { throw new RuntimeException(this.exception); } } if (this.result != NO_RESULT) { return this.result; } lock.wait(); } } catch (InterruptedException e) { //ignore } } } } /** * Command to evaluate something. Clients get locked in the getResult() until the result is * available. */ private class EvalCommand extends AbstractResultCommand { private String source; private int line; public EvalCommand(String source, int line) { super(); this.source = source; this.line = line; } /** * Returns undefined or a string-representation of the evaluation. 
*/ protected Object onEvaluate() { Object eval; if (DEBUG) { try { eval = cx.evaluateString(global, source, "eval", line, null); } catch (RuntimeException e) { e.printStackTrace(); throw e; } } else { eval = cx.evaluateString(global, source, "eval", line, null); } if (!(eval instanceof Undefined)) { return Context.toString(eval); } return eval; //return undefined } @Override public String toString() { return "EvalCommand: " + source; } } /** * Evaluation context */ private Context cx; /** * Scope where the evaluation should happen */ private Global global; /** * Queue to help in synchronizing commands. */ private final BlockingQueue<ICommand> queue = new LinkedBlockingQueue<ICommand>(); private class RhinoInterpreterThread extends Thread { /** * Whether the thread should keep running. */ private volatile boolean finished = false; public RhinoInterpreterThread() { super(); setName("RhinoInterpreterThread"); } @Override public void run() { ContextFactory contextFactory = new ContextFactory(); RhinoInterpreter.this.global = new Global(); RhinoInterpreter.this.global.init(contextFactory); RhinoInterpreter.this.cx = contextFactory.enterContext(); while (!finished) { try { ICommand cmd = queue.take(); if (cmd instanceof DisposeCommand) { finished = true; } if (DEBUG) { System.out.println("About to evaluate: " + cmd); } try { cmd.evaluate(); } catch (Throwable e) { if (DEBUG) { System.out.println("Evaluation finished with ERROR: " + cmd); } Log.log(e); } if (DEBUG) { System.out.println("Finished evaluation: " + cmd); } } catch (InterruptedException e) { //ignore } } } public void setErr(final OutputStream stream) { global.setErr(new PrintStream(stream)); } public void setOut(final OutputStream stream) { global.setOut(new PrintStream(stream)); } public PrintStream getOut() { return global.getOut(); } public Object getErr() { return global.getErr(); } /** * Array with tuples with name, doc, args, type */ public List<Object[]> getCompletions(String text, String actTok) { ArrayList<Object[]> ret = new ArrayList<Object[]>(); Scriptable obj; int index = actTok.lastIndexOf('.'); if (index != -1) { String var = actTok.substring(0, index); actTok = actTok.substring(index + 1); try { Object eval = cx.evaluateString(global, var, "<eval>", 0, null); if (eval instanceof Scriptable) { obj = (Scriptable) eval; } else { return ret; //not something we can complete on. } } catch (Exception e) { return ret; //unable to get variable. } } else { obj = global; } Object val = obj.get(actTok, global); if (val instanceof Scriptable) { obj = (Scriptable) val; } Object[] ids; if (obj instanceof ScriptableObject) { ids = ((ScriptableObject) obj).getAllIds(); } else { ids = obj.getIds(); } //types: // function: 2 // local: 9 // see: IToken.TYPE_ String lastPart = actTok.toLowerCase(); for (int i = 0; i < ids.length; i++) { if (!(ids[i] instanceof String)) { continue; } String id = (String) ids[i]; if (id.toLowerCase().startsWith(lastPart)) { if (obj.get(id, obj) instanceof Function) { ret.add(new Object[] { id, "", "()", 2 }); } else { ret.add(new Object[] { id, "", "", 9 }); } } } return ret; } } private RhinoInterpreterThread rhinoThread; public RhinoInterpreter() { rhinoThread = new RhinoInterpreterThread(); rhinoThread.start(); } /** * Helper to add some command to the queue. 
* @param command * @return */ protected ICommand addCommand(ICommand command) { if (DEBUG) { System.out.println("Adding command: " + command); } boolean added = false; while (!added) { try { queue.put(command); added = true; } catch (InterruptedException e) { } } return command; } /** * Throws an exception if command was not properly evaluated. */ public Object eval(String source) throws Exception { return eval(source, 0); } /** * Throws an exception if command was not properly evaluated. */ public Object eval(String source, int line) throws Exception { EvalCommand command = new EvalCommand(source, line); addCommand(command); return command.getResult(); } public void setErr(final OutputStream stream) { addCommand(new ICommand() { public void evaluate() { rhinoThread.setErr(new PrintStream(stream)); } }); } public void setOut(final OutputStream stream) { addCommand(new ICommand() { public void evaluate() { rhinoThread.setOut(new PrintStream(stream)); } }); } public PrintStream getOut() { AbstractResultCommand cmd = new AbstractResultCommand() { @Override protected Object onEvaluate() { return rhinoThread.getOut(); } @Override public String toString() { return "AbstractResultCommand:getOut()"; } }; addCommand(cmd); try { return (PrintStream) cmd.getResult(); } catch (Exception e) { throw new RuntimeException(e); } } public void dispose() { addCommand(new DisposeCommand()); } public PrintStream getErr() { AbstractResultCommand cmd = new AbstractResultCommand() { @Override protected Object onEvaluate() { return rhinoThread.getErr(); } @Override public String toString() { return "AbstractResultCommand:getErr()"; } }; addCommand(cmd); try { return (PrintStream) cmd.getResult(); } catch (Exception e) { throw new RuntimeException(e); } } public Object getDescription(String evalStr) throws Exception { AbstractResultCommand cmd = new EvalCommand(evalStr, 0) { @Override protected Object onEvaluate() { Object o = super.onEvaluate(); return Context.toString(o); } @Override public String toString() { return "EvalCommand:getDescription()"; } }; addCommand(cmd); return cmd.getResult(); } @SuppressWarnings("unchecked") public List<Object[]> getCompletions(final String text, final String actTok) { AbstractResultCommand cmd = new AbstractResultCommand() { @Override protected Object onEvaluate() { return rhinoThread.getCompletions(text, actTok); } @Override public String toString() { return "AbstractResultCommand:getCompletions()"; } }; addCommand(cmd); try { return (List<Object[]>) cmd.getResult(); } catch (Exception e) { throw new RuntimeException(e); } } }
Minor (comments only).
plugins/com.aptana.js.interactive_console/src_rhino_console/com/aptana/js/interactive_console/rhino/RhinoInterpreter.java
Minor (comments only).
<ide><path>lugins/com.aptana.js.interactive_console/src_rhino_console/com/aptana/js/interactive_console/rhino/RhinoInterpreter.java <ide> /** <ide> * This is the basic implementation for the interpreter. Note that it's much more complicated than <ide> * it appears it should be because rhino commands must all be evaluated at the same thread, so, <del> * it creates an internal thread for that purpose and uses a queue to synchronize things with it. <add> * it creates an internal thread for that purpose and uses a queue to communicate with it. <ide> * <ide> * Note that this also means that the interpreter must be properly disposed. <ide> *
Java
apache-2.0
094461892bb2304388e8fe3c9f227cbd88e3bf40
0
gastaldi/hibernate-validator,flibbertigibbet/hibernate-validator-android,shahramgdz/hibernate-validator,DavideD/hibernate-validator,mxrenkin/hibernate-validator,fazerish/hibernate-validator,mxrenkin/hibernate-validator,hibernate/hibernate-validator,shahramgdz/hibernate-validator,mxrenkin/hibernate-validator,fazerish/hibernate-validator,gastaldi/hibernate-validator,DavideD/hibernate-validator,mxrenkin/hibernate-validator,mohanaraosv/hibernate-validator,hibernate/hibernate-validator,mohanaraosv/hibernate-validator,mohanaraosv/hibernate-validator,fazerish/hibernate-validator,flibbertigibbet/hibernate-validator-android,hferentschik/hibernate-validator,marko-bekhta/hibernate-validator,emmanuelbernard/hibernate-validator,hferentschik/hibernate-validator,DavideD/hibernate-validator,shahramgdz/hibernate-validator,flibbertigibbet/hibernate-validator-android,marko-bekhta/hibernate-validator,mohanaraosv/hibernate-validator,fazerish/hibernate-validator,hferentschik/hibernate-validator,flibbertigibbet/hibernate-validator-android,marko-bekhta/hibernate-validator,DavideD/hibernate-validator,shahramgdz/hibernate-validator,hibernate/hibernate-validator
// $Id$ /* * JBoss, Home of Professional Open Source * Copyright 2008, Red Hat Middleware LLC, and individual contributors * by the @authors tag. See the copyright.txt in the distribution for a * full listing of individual contributors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.hibernate.validation.util; import java.beans.Introspector; import java.lang.annotation.Annotation; import java.lang.reflect.AccessibleObject; import java.lang.reflect.Field; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Member; import java.lang.reflect.Method; import java.lang.reflect.Modifier; import java.lang.reflect.ParameterizedType; import java.lang.reflect.Type; import java.lang.reflect.WildcardType; import java.util.ArrayList; import java.util.Arrays; import java.util.Iterator; import java.util.List; import java.util.Map; import javax.validation.ValidationException; import com.googlecode.jtype.TypeUtils; /** * Some reflection utility methods. * * @author Hardy Ferentschik */ public class ReflectionHelper { /** * Private constructor in order to avoid instantiation. */ private ReflectionHelper() { } @SuppressWarnings("unchecked") public static <T> T getAnnotationParameter(Annotation annotation, String parameterName, Class<T> type) { try { Method m = annotation.getClass().getMethod( parameterName ); Object o = m.invoke( annotation ); if ( o.getClass().getName().equals( type.getName() ) ) { return ( T ) o; } else { String msg = "Wrong parameter type. Expected: " + type.getName() + " Actual: " + o.getClass().getName(); throw new ValidationException( msg ); } } catch ( NoSuchMethodException e ) { String msg = "The specified annotation defines no parameter '" + parameterName + "'."; throw new ValidationException( msg, e ); } catch ( IllegalAccessException e ) { String msg = "Unable to get '" + parameterName + "' from " + annotation.getClass().getName(); throw new ValidationException( msg, e ); } catch ( InvocationTargetException e ) { String msg = "Unable to get '" + parameterName + "' from " + annotation.getClass().getName(); throw new ValidationException( msg, e ); } } /** * Process bean properties getter by applying the JavaBean naming conventions. * * @param member the member for which to get the property name. * * @return The bean method name with the "is" or "get" prefix stripped off, <code>null</code> * the method name id not according to the JavaBeans standard. */ public static String getPropertyName(Member member) { String name = null; if ( member instanceof Field ) { name = member.getName(); } if ( member instanceof Method ) { String methodName = member.getName(); if ( methodName.startsWith( "is" ) ) { name = Introspector.decapitalize( methodName.substring( 2 ) ); } else if ( methodName.startsWith( "get" ) ) { name = Introspector.decapitalize( methodName.substring( 3 ) ); } } return name; } /** * Returns the type of the field of return type of a method. * * @param member the member for which to get the type. 
* * @return Returns the type of the field of return type of a method. */ public static Class<?> getType(Member member) { Class<?> type = null; if ( member instanceof Field ) { type = ( ( Field ) member ).getType(); } if ( member instanceof Method ) { type = ( ( Method ) member ).getReturnType(); } return type; } /** * Returns the type of the field of return type of a method. * * @param member the member for which to get the type. * * @return Returns the type of the field of return type of a method. */ public static Class<?> getAnnotations(Member member) { Class<?> type = null; if ( member instanceof Field ) { type = ( ( Field ) member ).getType(); } if ( member instanceof Method ) { type = ( ( Method ) member ).getReturnType(); } return type; } /** * @param member The <code>Member</code> instance for which to retrieve the type. * * @return Retrurns the <code>Type</code> of the given <code>Field</code> or <code>Method</code>. * * @throws IllegalArgumentException in case <code>member</code> is not a <code>Field</code> or <code>Method</code>. */ public static Type typeOf(Member member) { if ( member instanceof Field ) { return ( ( Field ) member ).getGenericType(); } if ( member instanceof Method ) { return ( ( Method ) member ).getGenericReturnType(); } throw new IllegalArgumentException( "Member " + member + " is neither a field nor a method" ); } public static Object getValue(Member member, Object object) { Object value = null; if ( member instanceof Method ) { Method method = ( Method ) member; try { value = method.invoke( object ); } catch ( IllegalAccessException e ) { throw new ValidationException( "Unable to access " + method.getName(), e ); } catch ( InvocationTargetException e ) { throw new ValidationException( "Unable to access " + method.getName(), e ); } } else if ( member instanceof Field ) { Field field = ( Field ) member; try { value = field.get( object ); } catch ( IllegalAccessException e ) { throw new ValidationException( "Unable to access " + field.getName(), e ); } } return value; } public static void setAccessibility(Member member) { if ( !Modifier.isPublic( member.getModifiers() ) ) { //Sun's ease of use, sigh... ( ( AccessibleObject ) member ).setAccessible( true ); } } public static Class<?> loadClass(String name, Class<?> caller) throws ClassNotFoundException { try { //try context classloader, if fails try caller classloader ClassLoader loader = Thread.currentThread().getContextClassLoader(); if ( loader != null ) { return loader.loadClass( name ); } } catch ( ClassNotFoundException e ) { //trying caller classloader if ( caller == null ) { throw e; } } return Class.forName( name, true, caller.getClassLoader() ); } /** * Determines the type of elements of an <code>Iterable</code>, array or the value of a <code>Map</code>. * * @param type the type to inspect * * @return Returns the type of elements of an <code>Iterable</code>, array or the value of a <code>Map</code>. <code> * null</code> is returned in case the type is not indexable (in the context of JSR 303). 
*/ public static Type getIndexedType(Type type) { Type indexedType = null; if ( isIterable( type ) && type instanceof ParameterizedType ) { ParameterizedType paramType = ( ParameterizedType ) type; indexedType = paramType.getActualTypeArguments()[0]; } else if ( isMap( type ) && type instanceof ParameterizedType ) { ParameterizedType paramType = ( ParameterizedType ) type; indexedType = paramType.getActualTypeArguments()[1]; } else if ( TypeUtils.isArray( type ) ) { indexedType = TypeUtils.getComponentType( type ); } return indexedType; } /** * @param type the type to check. * * @return Returns <code>true</code> if <code>type</code> is a iterable type, <code>false</code> otherwise. */ public static boolean isIterable(Type type) { if ( type instanceof Class && isIterableClass( ( Class ) type ) ) { return true; } if ( type instanceof ParameterizedType ) { return isIterable( ( ( ParameterizedType ) type ).getRawType() ); } if ( type instanceof WildcardType ) { Type[] upperBounds = ( ( WildcardType ) type ).getUpperBounds(); return upperBounds.length != 0 && isIterable( upperBounds[0] ); } return false; } /** * @param type the type to check. * * @return Returns <code>true</code> if <code>type</code> is implementing <code>Map</code>, <code>false</code> otherwise. */ public static boolean isMap(Type type) { if ( type instanceof Class && isMapClass( ( Class ) type ) ) { return true; } if ( type instanceof ParameterizedType ) { return isMap( ( ( ParameterizedType ) type ).getRawType() ); } if ( type instanceof WildcardType ) { Type[] upperBounds = ( ( WildcardType ) type ).getUpperBounds(); return upperBounds.length != 0 && isMap( upperBounds[0] ); } return false; } /** * Tries to retrieve the indexed value from the specified object. * * @param value The object from which to retrieve the indexed value. The object has to be non <code>null</null> and * either a collection or array. * @param index The index. The index does not have to be numerical. <code>value</code> could also be a map in which * case the index could also be a string key. * * @return The indexed value or <code>null</code> if <code>value</code> is <code>null</code> or not a collection or array. * <code>null</code> is also returned in case the index does not exist. */ public static Object getIndexedValue(Object value, String index) { if ( value == null ) { return null; } // try to create the index int numIndex = -1; try { numIndex = Integer.valueOf( index ); } catch ( NumberFormatException nfe ) { // ignore } if ( numIndex == -1 ) { // must be a map indexed by string Map<?, ?> map = ( Map<?, ?> ) value; //noinspection SuspiciousMethodCalls return map.get( index ); } Iterator<?> iter = null; Type type = value.getClass(); if ( isIterable( type ) ) { iter = ( ( Iterable<?> ) value ).iterator(); } else if ( isMap( type ) ) { Map<?, ?> map = ( Map<?, ?> ) value; iter = map.values().iterator(); } else if ( TypeUtils.isArray( type ) ) { List<?> arrayList = Arrays.asList( value ); iter = arrayList.iterator(); } int i = 0; Object o; while ( iter.hasNext() ) { o = iter.next(); if ( i == numIndex ) { return o; } i++; } return null; } /** * Checks whether the specified class contains a field or property matching the given name. * * @param clazz The class to check. * @param property The property name. * * @return Returns <code>true</code> if the cass contains a field or member for the specified property, <code> * false</code> otherwise. 
*/ public static boolean containsMember(Class<?> clazz, String property) { return containsField( clazz, property ) || containsMethod( clazz, property ); } /** * Checks whether the specified class contains a field matching the specified name. * * @param clazz The class to check. * @param fieldName The field name. * * @return Returns <code>true</code> if the cass contains a field for the specified name, <code> * false</code> otherwise. */ public static boolean containsField(Class<?> clazz, String fieldName) { try { clazz.getDeclaredField( fieldName ); return true; } catch ( NoSuchFieldException e ) { return false; } } /** * Returns the field with the specified name or <code>null</code> if it does not exist. * * @param clazz The class to check. * @param fieldName The field name. * * @return Returns the field with the specified name or <code>null</code> if it does not exist. */ public static Field getField(Class<?> clazz, String fieldName) { try { Field field = clazz.getDeclaredField( fieldName ); setAccessibility( field ); return field; } catch ( NoSuchFieldException e ) { return null; } } /** * Checks whether the specified class contains a method matching the specified name. * * @param clazz The class to check. * @param methodName The method name. * * @return Returns <code>true</code> if the cass contains a property for the specified name, <code> * false</code> otherwise. */ public static boolean containsMethod(Class<?> clazz, String methodName) { return getMethod( clazz, methodName ) != null; } /** * Returns the method with the specified name or <code>null</code> if it does not exist. * * @param clazz The class to check. * @param methodName The method name. * * @return Returns the method with the specified name or <code>null</code> if it does not exist. */ public static Method getMethod(Class<?> clazz, String methodName) { try { char string[] = methodName.toCharArray(); string[0] = Character.toUpperCase( string[0] ); methodName = new String( string ); try { return clazz.getMethod( "get" + methodName ); } catch ( NoSuchMethodException e ) { return clazz.getMethod( "is" + methodName ); } } catch ( NoSuchMethodException e ) { return null; } } public static Class<?> classForName(String name, Class<?> caller) throws ClassNotFoundException { try { ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader(); if ( contextClassLoader != null ) { return contextClassLoader.loadClass( name ); } } catch ( Throwable e ) { // ignore } return Class.forName( name, true, caller.getClassLoader() ); } /** * Get all superclasses and interfaces recursively. * * @param clazz The class to start the search with. * @param classes List of classes to which to add all found super classes and interfaces. */ private static void computeClassHierarchy(Class<?> clazz, List<Class<?>> classes) { for ( Class current = clazz; current != null; current = current.getSuperclass() ) { if ( classes.contains( current ) ) { return; } classes.add( current ); for ( Class currentInterface : current.getInterfaces() ) { computeClassHierarchy( currentInterface, classes ); } } } /** * Checks whether the specified class parameter is an instance of a collection class. * * @param clazz <code>Class</code> to check. * * @return <code>true</code> is <code>clazz</code> is instance of a collection class, <code>false</code> otherwise. 
*/ private static boolean isIterableClass(Class<?> clazz) { List<Class<?>> classes = new ArrayList<Class<?>>(); computeClassHierarchy( clazz, classes ); return classes.contains( Iterable.class ); } /** * Checks whether the specified class parameter is an instance of a collection class. * * @param clazz <code>Class</code> to check. * * @return <code>true</code> is <code>clazz</code> is instance of a collection class, <code>false</code> otherwise. */ private static boolean isMapClass(Class<?> clazz) { List<Class<?>> classes = new ArrayList<Class<?>>(); computeClassHierarchy( clazz, classes ); return classes.contains( Map.class ); } }
hibernate-validator/src/main/java/org/hibernate/validation/util/ReflectionHelper.java
// $Id$ /* * JBoss, Home of Professional Open Source * Copyright 2008, Red Hat Middleware LLC, and individual contributors * by the @authors tag. See the copyright.txt in the distribution for a * full listing of individual contributors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.hibernate.validation.util; import java.beans.Introspector; import java.lang.annotation.Annotation; import java.lang.reflect.AccessibleObject; import java.lang.reflect.Field; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Member; import java.lang.reflect.Method; import java.lang.reflect.Modifier; import java.lang.reflect.ParameterizedType; import java.lang.reflect.Type; import java.lang.reflect.WildcardType; import java.util.ArrayList; import java.util.Arrays; import java.util.Iterator; import java.util.List; import java.util.Map; import javax.validation.ValidationException; import com.googlecode.jtype.TypeUtils; /** * Some reflection utility methods. * * @author Hardy Ferentschik */ public class ReflectionHelper { /** * Private constructor in order to avoid instantiation. */ private ReflectionHelper() { } @SuppressWarnings("unchecked") public static <T> T getAnnotationParameter(Annotation annotation, String parameterName, Class<T> type) { try { Method m = annotation.getClass().getMethod( parameterName ); Object o = m.invoke( annotation ); if ( o.getClass().getName().equals( type.getName() ) ) { return ( T ) o; } else { String msg = "Wrong parameter type. Expected: " + type.getName() + " Actual: " + o.getClass().getName(); throw new ValidationException( msg ); } } catch ( NoSuchMethodException e ) { String msg = "The specified annotation defines no parameter '" + parameterName + "'."; throw new ValidationException( msg, e ); } catch ( IllegalAccessException e ) { String msg = "Unable to get '" + parameterName + "' from " + annotation.getClass().getName(); throw new ValidationException( msg, e ); } catch ( InvocationTargetException e ) { String msg = "Unable to get '" + parameterName + "' from " + annotation.getClass().getName(); throw new ValidationException( msg, e ); } } /** * Process bean properties getter by applying the JavaBean naming conventions. * * @param member the member for which to get the property name. * * @return The bean method name with the "is" or "get" prefix stripped off, <code>null</code> * the method name id not according to the JavaBeans standard. */ public static String getPropertyName(Member member) { String name = null; if ( member instanceof Field ) { name = member.getName(); } if ( member instanceof Method ) { String methodName = member.getName(); if ( methodName.startsWith( "is" ) ) { name = Introspector.decapitalize( methodName.substring( 2 ) ); } else if ( methodName.startsWith( "get" ) ) { name = Introspector.decapitalize( methodName.substring( 3 ) ); } } return name; } /** * Returns the type of the field of return type of a method. * * @param member the member for which to get the type. 
* * @return Returns the type of the field of return type of a method. */ public static Class<?> getType(Member member) { Class<?> type = null; if ( member instanceof Field ) { type = ( ( Field ) member ).getType(); } if ( member instanceof Method ) { type = ( ( Method ) member ).getReturnType(); } return type; } /** * Returns the type of the field of return type of a method. * * @param member the member for which to get the type. * * @return Returns the type of the field of return type of a method. */ public static Class<?> getAnnotations(Member member) { Class<?> type = null; if ( member instanceof Field ) { type = ( ( Field ) member ).getType(); } if ( member instanceof Method ) { type = ( ( Method ) member ).getReturnType(); } return type; } /** * @param member The <code>Member</code> instance for which to retrieve the type. * * @return Retrurns the <code>Type</code> of the given <code>Field</code> or <code>Method</code>. * * @throws IllegalArgumentException in case <code>member</code> is not a <code>Field</code> or <code>Method</code>. */ public static Type typeOf(Member member) { if ( member instanceof Field ) { return ( ( Field ) member ).getGenericType(); } if ( member instanceof Method ) { return ( ( Method ) member ).getGenericReturnType(); } throw new IllegalArgumentException( "Member " + member + " is neither a field nor a method" ); } public static Object getValue(Member member, Object object) { Object value = null; if ( member instanceof Method ) { Method method = ( Method ) member; try { value = method.invoke( object ); } catch ( IllegalAccessException e ) { throw new ValidationException( "Unable to access " + method.getName(), e ); } catch ( InvocationTargetException e ) { throw new ValidationException( "Unable to access " + method.getName(), e ); } } else if ( member instanceof Field ) { Field field = ( Field ) member; try { value = field.get( object ); } catch ( IllegalAccessException e ) { throw new ValidationException( "Unable to access " + field.getName(), e ); } } return value; } public static void setAccessibility(Member member) { if ( !Modifier.isPublic( member.getModifiers() ) ) { //Sun's ease of use, sigh... ( ( AccessibleObject ) member ).setAccessible( true ); } } public static Class<?> loadClass(String name, Class<?> caller) throws ClassNotFoundException { try { //try context classloader, if fails try caller classloader ClassLoader loader = Thread.currentThread().getContextClassLoader(); if ( loader != null ) { return loader.loadClass( name ); } } catch ( ClassNotFoundException e ) { //trying caller classloader if ( caller == null ) { throw e; } } return Class.forName( name, true, caller.getClassLoader() ); } /** * Determines the type of elements of an <code>Iterable</code>, array or the value of a <code>Map</code>. * * @param type the type to inspect * * @return Returns the type of elements of an <code>Iterable</code>, array or the value of a <code>Map</code>. <code> * null</code> is returned in case the type is not indexable (in the context of JSR 303). 
*/ public static Type getIndexedType(Type type) { Type indexedType = null; if ( isIterable( type ) && type instanceof ParameterizedType ) { ParameterizedType paramType = ( ParameterizedType ) type; indexedType = paramType.getActualTypeArguments()[0]; } else if ( isMap( type ) && type instanceof ParameterizedType ) { ParameterizedType paramType = ( ParameterizedType ) type; indexedType = paramType.getActualTypeArguments()[1]; } else if ( TypeUtils.isArray( type ) ) { indexedType = TypeUtils.getComponentType( type ); } return indexedType; } /** * @param type the type to check. * * @return Returns <code>true</code> if <code>type</code> is a iterable type, <code>false</code> otherwise. */ public static boolean isIterable(Type type) { if ( type instanceof Class && isIterableClass( ( Class ) type ) ) { return true; } if ( type instanceof ParameterizedType ) { return isIterable( ( ( ParameterizedType ) type ).getRawType() ); } if ( type instanceof WildcardType ) { Type[] upperBounds = ( ( WildcardType ) type ).getUpperBounds(); return upperBounds.length != 0 && isIterable( upperBounds[0] ); } return false; } /** * @param type the type to check. * * @return Returns <code>true</code> if <code>type</code> is implementing <code>Map</code>, <code>false</code> otherwise. */ public static boolean isMap(Type type) { if ( type instanceof Class && isMapClass( ( Class ) type ) ) { return true; } if ( type instanceof ParameterizedType ) { return isMap( ( ( ParameterizedType ) type ).getRawType() ); } if ( type instanceof WildcardType ) { Type[] upperBounds = ( ( WildcardType ) type ).getUpperBounds(); return upperBounds.length != 0 && isMap( upperBounds[0] ); } return false; } /** * Tries to retrieve the indexed value from the specified object. * * @param value The object from which to retrieve the indexed value. The object has to be non <code>null</null> and * either a collection or array. * @param index The index. The index does not have to be numerical. <code>value</code> could also be a map in which * case the index could also be a string key. * * @return The indexed value or <code>null</code> if <code>value</code> is <code>null</code> or not a collection or array. * <code>null</code> is also returned in case the index does not exist. */ public static Object getIndexedValue(Object value, String index) { if ( value == null ) { return null; } // try to create the index int numIndex = -1; try { numIndex = Integer.valueOf( index ); } catch ( NumberFormatException nfe ) { // ignore } if ( numIndex == -1 ) { // must be a map indexed by string Map<?, ?> map = ( Map<?, ?> ) value; //noinspection SuspiciousMethodCalls return map.get( index ); } Iterator<?> iter = null; Type type = value.getClass(); if ( isIterable( type ) ) { iter = ( ( Iterable<?> ) value ).iterator(); } else if ( isMap( type ) ) { Map<?, ?> map = ( Map<?, ?> ) value; iter = map.values().iterator(); } else if ( TypeUtils.isArray( type ) ) { List<?> arrayList = Arrays.asList( value ); iter = arrayList.iterator(); } int i = 0; Object o; while ( iter.hasNext() ) { o = iter.next(); if ( i == numIndex ) { return o; } i++; } return null; } /** * Checks whether the specified class contains a field or property matching the given name. * * @param clazz The class to check. * @param property The property name. * * @return Returns <code>true</code> if the cass contains a field or member for the specified property, <code> * false</code> otherwise. 
*/ public static boolean containsMember(Class<?> clazz, String property) { return containsField( clazz, property ) || containsMethod( clazz, property ); } /** * Checks whether the specified class contains a field matching the specified name. * * @param clazz The class to check. * @param fieldName The field name. * * @return Returns <code>true</code> if the cass contains a field for the specified name, <code> * false</code> otherwise. */ public static boolean containsField(Class<?> clazz, String fieldName) { try { clazz.getDeclaredField( fieldName ); return true; } catch ( NoSuchFieldException e ) { return false; } } /** * Returns the field with the specified name or <code>null</code> if it does not exist. * * @param clazz The class to check. * @param fieldName The field name. * * @return Returns the field with the specified name or <code>null</code> if it does not exist. */ public static Field getField(Class<?> clazz, String fieldName) { try { Field field = clazz.getDeclaredField( fieldName ); setAccessibility( field ); return field; } catch ( NoSuchFieldException e ) { return null; } } /** * Checks whether the specified class contains a method matching the specified name. * * @param clazz The class to check. * @param methodName The method name. * * @return Returns <code>true</code> if the cass contains a property for the specified name, <code> * false</code> otherwise. */ public static boolean containsMethod(Class<?> clazz, String methodName) { try { char string[] = methodName.toCharArray(); string[0] = Character.toUpperCase( string[0] ); methodName = new String( string ); try { clazz.getMethod( "get" + methodName ); } catch ( NoSuchMethodException e ) { clazz.getMethod( "is" + methodName ); } return true; } catch ( NoSuchMethodException e ) { return false; } } /** * Returns the method with the specified name or <code>null</code> if it does not exist. * * @param clazz The class to check. * @param methodName The method name. * * @return Returns the method with the specified name or <code>null</code> if it does not exist. */ public static Method getMethod(Class<?> clazz, String methodName) { try { return clazz.getMethod( "get" + methodName.substring( 0, 1 ).toUpperCase() + methodName.substring( 1 ) ); } catch ( NoSuchMethodException e ) { return null; } } public static Class<?> classForName(String name, Class<?> caller) throws ClassNotFoundException { try { ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader(); if ( contextClassLoader != null ) { return contextClassLoader.loadClass( name ); } } catch ( Throwable e ) { // ignore } return Class.forName( name, true, caller.getClassLoader() ); } /** * Get all superclasses and interfaces recursively. * * @param clazz The class to start the search with. * @param classes List of classes to which to add all found super classes and interfaces. */ private static void computeClassHierarchy(Class<?> clazz, List<Class<?>> classes) { for ( Class current = clazz; current != null; current = current.getSuperclass() ) { if ( classes.contains( current ) ) { return; } classes.add( current ); for ( Class currentInterface : current.getInterfaces() ) { computeClassHierarchy( currentInterface, classes ); } } } /** * Checks whether the specified class parameter is an instance of a collection class. * * @param clazz <code>Class</code> to check. * * @return <code>true</code> is <code>clazz</code> is instance of a collection class, <code>false</code> otherwise. 
*/ private static boolean isIterableClass(Class<?> clazz) { List<Class<?>> classes = new ArrayList<Class<?>>(); computeClassHierarchy( clazz, classes ); return classes.contains( Iterable.class ); } /** * Checks whether the specified class parameter is an instance of a collection class. * * @param clazz <code>Class</code> to check. * * @return <code>true</code> is <code>clazz</code> is instance of a collection class, <code>false</code> otherwise. */ private static boolean isMapClass(Class<?> clazz) { List<Class<?>> classes = new ArrayList<Class<?>>(); computeClassHierarchy( clazz, classes ); return classes.contains( Map.class ); } }
fix bug with isGetter not retrieved when getMethod is used git-svn-id: 26dda5117cf3d919afec67b81d32a23dad579a1f@16522 1b8cb986-b30d-0410-93ca-fae66ebed9b2
hibernate-validator/src/main/java/org/hibernate/validation/util/ReflectionHelper.java
fix bug with isGetter not retrieved when getMethod is used
<ide><path>ibernate-validator/src/main/java/org/hibernate/validation/util/ReflectionHelper.java <ide> * false</code> otherwise. <ide> */ <ide> public static boolean containsMethod(Class<?> clazz, String methodName) { <add> return getMethod( clazz, methodName ) != null; <add> } <add> <add> /** <add> * Returns the method with the specified name or <code>null</code> if it does not exist. <add> * <add> * @param clazz The class to check. <add> * @param methodName The method name. <add> * <add> * @return Returns the method with the specified name or <code>null</code> if it does not exist. <add> */ <add> public static Method getMethod(Class<?> clazz, String methodName) { <ide> try { <ide> char string[] = methodName.toCharArray(); <ide> string[0] = Character.toUpperCase( string[0] ); <ide> methodName = new String( string ); <ide> try { <del> clazz.getMethod( "get" + methodName ); <add> return clazz.getMethod( "get" + methodName ); <ide> } <ide> catch ( NoSuchMethodException e ) { <del> clazz.getMethod( "is" + methodName ); <del> } <del> return true; <del> } <del> catch ( NoSuchMethodException e ) { <del> return false; <del> } <del> } <del> <del> /** <del> * Returns the method with the specified name or <code>null</code> if it does not exist. <del> * <del> * @param clazz The class to check. <del> * @param methodName The method name. <del> * <del> * @return Returns the method with the specified name or <code>null</code> if it does not exist. <del> */ <del> public static Method getMethod(Class<?> clazz, String methodName) { <del> try { <del> return clazz.getMethod( "get" + methodName.substring( 0, 1 ).toUpperCase() + methodName.substring( 1 ) ); <add> return clazz.getMethod( "is" + methodName ); <add> } <ide> } <ide> catch ( NoSuchMethodException e ) { <ide> return null;
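A minimal usage sketch for the refactored helpers in the diff above (not part of the record). Toggle is a hypothetical class introduced only for illustration; the point is that a boolean "is"-style getter — the case the commit message calls out — is now resolved by getMethod as well as containsMethod.

import java.lang.reflect.Method;
import org.hibernate.validation.util.ReflectionHelper;

public class GetterLookupSketch {

    // Hypothetical bean exposing only an "is"-prefixed getter.
    public static class Toggle {
        public boolean isEnabled() { return true; }
    }

    public static void main(String[] args) {
        // Before the fix, getMethod only tried "getEnabled" and returned null;
        // after the fix it falls back to "isEnabled".
        Method getter = ReflectionHelper.getMethod(Toggle.class, "enabled");
        boolean present = ReflectionHelper.containsMethod(Toggle.class, "enabled");
        System.out.println(getter.getName() + " / " + present); // prints "isEnabled / true"
    }
}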
JavaScript
mit
769a7841978011a0aea9059c9a16b2687b5820ce
0
berkley/spark,berkley/spark,berkley/spark,berkley/spark
var express = require('express'); var lights = require('./routes/lights'); var water = require('./routes/water'); var index = require('./routes/index'); var freddy = require('./routes/freddy'); var camp = require('./routes/camp'); //control the camp fire puffers var security = require('./routes/security'); var http = require('http'); var path = require('path'); var nconf = require('nconf'); var fs = require('fs'); var sockets = require('./websocket/sockets'); sockets.setConfig(nconf); var app = express(); nconf.argv() .env() .file({file: 'config.json'}); lights.setConfig(nconf); water.setConfig(nconf); security.setConfig(nconf); app.set('port', process.env.PORT || nconf.get("port") || 3000); app.set('views', path.join(__dirname, 'views')); app.set('view engine', 'ejs'); app.use(express.favicon()); app.use(express.logger('dev')); app.use(express.json()); app.use(express.urlencoded()); app.use(express.methodOverride()); app.use(app.router); app.use(express.static(path.join(__dirname, 'public'))); app.get('/', index.index); app.get('/lights', lights.index); app.get('/water', water.index); app.get('/security', security.index); app.get('/house/run/action', lights.action); app.get('/house/params/:coreId', lights.params); //get the current param state for the given coreId; app.get('/water/run/action', water.action); app.get('/freddy', freddy.index); app.get('/freddy/run/action', freddy.action); app.get('/camp', camp.index); var server = http.createServer(app).listen(app.get('port'), function(){ console.log('Express server listening on port ' + app.get('port')); });
node/app.js
var express = require('express'); var lights = require('./routes/lights'); var water = require('./routes/water'); var index = require('./routes/index'); var freddy = require('./routes/freddy'); var camp = require('./routes/camp'); //control the camp fire puffers var security = require('./routes/security'); var http = require('http'); var path = require('path'); var nconf = require('nconf'); var fs = require('fs'); var sockets = require('./websocket/sockets'); sockets.setConfig(nconf); var app = express(); nconf.argv() .env() .file({file: 'config.json'}); lights.setConfig(nconf); water.setConfig(nconf); security.setConfig(nconf); app.set('port', nconf.get("port") || process.env.PORT || 3000); app.set('views', path.join(__dirname, 'views')); app.set('view engine', 'ejs'); app.use(express.favicon()); app.use(express.logger('dev')); app.use(express.json()); app.use(express.urlencoded()); app.use(express.methodOverride()); app.use(app.router); app.use(express.static(path.join(__dirname, 'public'))); app.get('/', index.index); app.get('/lights', lights.index); app.get('/water', water.index); app.get('/security', security.index); app.get('/house/run/action', lights.action); app.get('/house/params/:coreId', lights.params); //get the current param state for the given coreId; app.get('/water/run/action', water.action); app.get('/freddy', freddy.index); app.get('/freddy/run/action', freddy.action); app.get('/camp', camp.index); var server = http.createServer(app).listen(app.get('port'), function(){ console.log('Express server listening on port ' + app.get('port')); });
changed port order
node/app.js
changed port order
<ide><path>ode/app.js <ide> water.setConfig(nconf); <ide> security.setConfig(nconf); <ide> <del>app.set('port', nconf.get("port") || process.env.PORT || 3000); <add>app.set('port', process.env.PORT || nconf.get("port") || 3000); <ide> app.set('views', path.join(__dirname, 'views')); <ide> app.set('view engine', 'ejs'); <ide> app.use(express.favicon());
JavaScript
mit
2b3e5f0386541492ba241090e9c00a5502534360
0
infinitered/reactotron,infinitered/reactotron,infinitered/reactotron,reactotron/reactotron,reactotron/reactotron,reactotron/reactotron,reactotron/reactotron,infinitered/reactotron,reactotron/reactotron
import React, { Component } from "react" import PropTypes from "prop-types" import Command from "../Shared/Command" import Content from "../Shared/Content" const COMMAND_TITLE = "DISPLAY" const Styles = { imageContainer: {}, image: { maxWidth: "100%", maxHeight: "100%", }, } class DisplayCommand extends Component { static propTypes = { command: PropTypes.object.isRequired, } shouldComponentUpdate(nextProps) { return this.props.command.id !== nextProps.command.id } render() { const { command } = this.props const { payload, important } = command const { name, value, image, preview } = payload return ( <Command {...this.props} title={name || COMMAND_TITLE} important={important} preview={preview} > {value && <Content value={value} />} {image && ( <div style={Styles.imageContainer}> <img style={Styles.image} src={image} /> </div> )} </Command> ) } } export default DisplayCommand
packages/reactotron-app/App/Commands/DisplayCommand.js
import React, { Component } from "react" import PropTypes from "prop-types" import Command from "../Shared/Command" import Content from "../Shared/Content" const COMMAND_TITLE = "DISPLAY" const Styles = { imageContainer: {}, image: { maxWidth: "100%", maxHeight: "100%", }, } class DisplayCommand extends Component { static propTypes = { command: PropTypes.object.isRequired, } shouldComponentUpdate(nextProps) { return this.props.command.id !== nextProps.command.id } render() { const { command } = this.props const { payload, important } = command const { name, value, image, preview } = payload return ( <Command {...this.props} title={name || COMMAND_TITLE} important={important} preview={preview} > {value && <Content value={value} />} {image && ( <div style={Styles.imageContainer}> <img style={Styles.image} src={image.uri} /> </div> )} </Command> ) } } export default DisplayCommand
Fixes images not being shown in display calls
packages/reactotron-app/App/Commands/DisplayCommand.js
Fixes images not being shown in display calls
<ide><path>ackages/reactotron-app/App/Commands/DisplayCommand.js <ide> {value && <Content value={value} />} <ide> {image && ( <ide> <div style={Styles.imageContainer}> <del> <img style={Styles.image} src={image.uri} /> <add> <img style={Styles.image} src={image} /> <ide> </div> <ide> )} <ide> </Command>
Java
apache-2.0
a74dfe9cacf078fddf48fc1b49bcb27709c9c9d8
0
paritytrading/philadelphia,paritytrading/philadelphia,paritytrading/philadelphia
package com.paritytrading.philadelphia; import static com.paritytrading.philadelphia.FIX.*; import static com.paritytrading.philadelphia.FIXMsgTypes.*; import static com.paritytrading.philadelphia.FIXSessionRejectReasons.*; import static com.paritytrading.philadelphia.FIXTags.*; import java.io.Closeable; import java.io.IOException; import java.nio.ByteBuffer; import java.nio.channels.SocketChannel; import org.joda.time.DateTimeZone; import org.joda.time.MutableDateTime; /** * A connection. */ public class FIXConnection implements Closeable { private final Clock clock; private final SocketChannel channel; private final FIXConfig config; private final FIXValue bodyLength; private final FIXValue checkSum; private String senderCompId; private String targetCompId; private long rxMsgSeqNum; private long txMsgSeqNum; private final ByteBuffer rxBuffer; private final ByteBuffer txHeaderBuffer; private final int bodyLengthOffset; private final ByteBuffer txBodyBuffer; private final ByteBuffer[] txBuffers; /* * This variable is written on data reception and read on connection * keep-alive. These two functions can run on different threads * without locking. */ private volatile long lastRxMillis; /* * This variable is written on data transmission and read on connection * keep-alive. These two functions can run on different threads but * require locking. */ private long lastTxMillis; private long testRequestTxMillis; private final long heartbeatMillis; private final long testRequestMillis; private final FIXMessageParser parser; private final FIXConnectionStatusListener statusListener; private final FIXMessage txMessage; private long currentTimeMillis; private final MutableDateTime currentTime; private final StringBuilder currentTimestamp; /** * Create a connection. The underlying socket channel can be either * blocking or non-blocking. 
* * @param clock the clock * @param channel the underlying socket channel * @param config the connection configuration * @param listener the inbound message listener * @param statusListener the inbound status event listener */ public FIXConnection(Clock clock, SocketChannel channel, FIXConfig config, FIXMessageListener listener, FIXConnectionStatusListener statusListener) { this.clock = clock; this.channel = channel; this.config = config; this.bodyLength = new FIXValue(BODY_LENGTH_FIELD_CAPACITY); this.checkSum = new FIXValue(CHECK_SUM_FIELD_CAPACITY); this.senderCompId = config.getSenderCompID(); this.targetCompId = config.getTargetCompID(); this.parser = new FIXMessageParser(config, new MessageHandler(listener)); this.statusListener = statusListener; this.rxMsgSeqNum = config.getIncomingMsgSeqNum(); this.txMsgSeqNum = config.getOutgoingMsgSeqNum(); this.rxBuffer = ByteBuffer.allocateDirect(config.getRxBufferCapacity()); this.txHeaderBuffer = ByteBuffer.allocateDirect(config.getTxBufferCapacity()); FIXValue beginString = new FIXValue(BEGIN_STRING_FIELD_CAPACITY); beginString.setString(config.getVersion().getBeginString()); this.txHeaderBuffer.put(BEGIN_STRING); beginString.put(this.txHeaderBuffer); this.txHeaderBuffer.put(BODY_LENGTH); this.bodyLengthOffset = this.txHeaderBuffer.position(); this.txBodyBuffer = ByteBuffer.allocateDirect(config.getTxBufferCapacity()); this.txBuffers = new ByteBuffer[2]; this.txBuffers[0] = txHeaderBuffer; this.txBuffers[1] = txBodyBuffer; this.lastRxMillis = clock.currentTimeMillis(); this.lastTxMillis = clock.currentTimeMillis(); this.heartbeatMillis = config.getHeartBtInt() * 1000; this.testRequestMillis = config.getHeartBtInt() * 1100; this.testRequestTxMillis = 0; this.txMessage = new FIXMessage(config.getMaxFieldCount(), config.getFieldCapacity()); this.currentTimeMillis = clock.currentTimeMillis(); this.currentTime = new MutableDateTime(this.currentTimeMillis, DateTimeZone.UTC); this.currentTimestamp = new StringBuilder(config.getFieldCapacity()); FIXTimestamps.append(this.currentTime, this.currentTimestamp); } /** * Create a connection. The underlying socket channel can be either * blocking or non-blocking. * * @param channel the underlying socket channel * @param config the connection configuration * @param listener the inbound message listener * @param statusListener the inbound status event listener */ public FIXConnection(SocketChannel channel, FIXConfig config, FIXMessageListener listener, FIXConnectionStatusListener statusListener) { this(System::currentTimeMillis, channel, config, listener, statusListener); } /** * Get the underlying socket channel. * * @return the underlying socket channel */ public SocketChannel getChannel() { return channel; } /** * Get the next incoming MsgSeqNum(34). * * @return the next incoming MsgSeqNum(34) */ public long getIncomingMsgSeqNum() { return rxMsgSeqNum; } void setIncomingMsgSeqNum(long incomingMsgSeqNum) { rxMsgSeqNum = incomingMsgSeqNum; } /** * Get the next outgoing MsgSeqNum(34). * * @return the next outgoing MsgSeqNum(34) */ public long getOutgoingMsgSeqNum() { return txMsgSeqNum; } /** * Get the SenderCompID(49). * * @return the SenderCompID(49) */ public String getSenderCompID() { return senderCompId; } /** * Get the TargetCompID(56). * * @return the TargetCompID(56) */ public String getTargetCompID() { return targetCompId; } /** * Create a message container. 
* * @return a message container */ public FIXMessage create() { return new FIXMessage(config.getMaxFieldCount(), config.getFieldCapacity()); } /** * <p>Prepare a message. When preparing a message, the following mandatory * fields are added:</p> * * <ul> * <li>MsgType(35)</li> * <li>SenderCompID(49)</li> * <li>TargetCompID(56)</li> * <li>MsgSeqNum(34)</li> * <li>SendingTime(52)</li> * </ul> * * @param message a message * @param msgType the MsgType(35) */ public void prepare(FIXMessage message, char msgType) { message.reset(); message.addField(MsgType).setChar(msgType); prepare(message); } /** * <p>Prepare a message.</p> * * @param message a message * @param msgType the MsgType(35) * @see #prepare(FIXMessage, char) */ public void prepare(FIXMessage message, CharSequence msgType) { message.reset(); message.addField(MsgType).setString(msgType); prepare(message); } private void prepare(FIXMessage message) { message.addField(SenderCompID).setString(senderCompId); message.addField(TargetCompID).setString(targetCompId); message.addField(MsgSeqNum).setInt(txMsgSeqNum); message.addField(SendingTime).setString(currentTimestamp); } /** * <p>Update a message. When updating a message, the following mandatory * fields are updated:</p> * * <ul> * <li>MsgSeqNum(34)</li> * <li>SendingTime(52)</li> * </ul> * * @param message a message * @throws NullPointerException if MsgSeqNum(34) or SendingTime(52) is * not found */ public void update(FIXMessage message) { message.valueOf(MsgSeqNum).setInt(txMsgSeqNum); message.valueOf(SendingTime).setString(currentTimestamp); } /** * Update SenderCompID(49) and TargetCompID(56). * * @param message a message * @throws NullPointerException if SenderCompID(49) or TargetCompID(56) * is not found */ public void updateCompID(FIXMessage message) { message.valueOf(SenderCompID).setString(senderCompId); message.valueOf(TargetCompID).setString(targetCompId); } /** * <p>Update the current timestamp. The current timestamp is used for the * following purposes:</p> * * <ul> * <li>SendingTime(52)</li> * <li>the connection keep-alive mechanism</li> * </ul> */ public void updateCurrentTimestamp() { currentTimeMillis = clock.currentTimeMillis(); currentTime.setMillis(currentTimeMillis); currentTimestamp.setLength(0); FIXTimestamps.append(currentTime, currentTimestamp); } /** * Get the current timestamp. * * @return the current timestamp */ public CharSequence getCurrentTimestamp() { return currentTimestamp; } /** * Keep this connection alive. * * <p>If the duration indicated by HeartBtInt(108) has passed since * sending a message, send a Heartbeat(0) message.</p> * * <p>If the duration indicated by HeartBtInt(108) amended with a * reasonable transmission time has passed since receiving a message, * send a TestRequest(1) message.</p> * * <p>If a TestRequest(1) message has been sent and no data has been * received within the duration indicated by HeartBtInt(108) amended with * a reasonable transmission time, trigger a status event indicating * heartbeat timeout.</p> * * @throws IOException if an I/O error occurs */ public void keepAlive() throws IOException { if (currentTimeMillis - lastTxMillis > heartbeatMillis) sendHeartbeat(); if (testRequestTxMillis == 0) { if (currentTimeMillis - lastRxMillis > testRequestMillis) { sendTestRequest(currentTimestamp); testRequestTxMillis = currentTimeMillis; } } else { if (currentTimeMillis - testRequestTxMillis > testRequestMillis) { statusListener.heartbeatTimeout(this); testRequestTxMillis = 0; } } } /** * Close the underlying socket channel. 
* * @throws IOException if an I/O error occurs */ @Override public void close() throws IOException { channel.close(); } /** * Receive data from the underlying socket channel. For each message * received, invoke the message listener if applicable. * * @return the number of bytes read, possibly zero, or -1 if the channel * has reached end-of-stream * @throws IOException if an I/O error occurs */ public int receive() throws IOException { int bytes = channel.read(rxBuffer); if (bytes <= 0) return bytes; rxBuffer.flip(); while (parser.parse(rxBuffer)); rxBuffer.compact(); if (rxBuffer.position() == rxBuffer.capacity()) tooLongMessage(); lastRxMillis = currentTimeMillis; testRequestTxMillis = 0; return bytes; } /** * Send a message. * * @param message a message * @throws IOException if an I/O error occurs */ public void send(FIXMessage message) throws IOException { txBodyBuffer.clear(); message.put(txBodyBuffer); bodyLength.setInt(txBodyBuffer.position()); txHeaderBuffer.position(bodyLengthOffset); bodyLength.put(txHeaderBuffer); checkSum.setCheckSum(FIXCheckSums.sum(txHeaderBuffer, 0, txHeaderBuffer.position()) + FIXCheckSums.sum(txBodyBuffer, 0, txBodyBuffer.position())); txBodyBuffer.put(CHECK_SUM); checkSum.put(txBodyBuffer); txHeaderBuffer.flip(); txBodyBuffer.flip(); int remaining = txHeaderBuffer.remaining() + txBodyBuffer.remaining(); do { remaining -= channel.write(txBuffers, 0, txBuffers.length); } while (remaining > 0); txMsgSeqNum++; lastTxMillis = currentTimeMillis; } /** * Send a Reject(3) message. * * @param refSeqNum the RefSeqNum(45) * @param sessionRejectReason the SessionRejectReason(373) * @param text the Text(58) * @throws IOException if an I/O error occurs */ public void sendReject(long refSeqNum, long sessionRejectReason, CharSequence text) throws IOException { prepare(txMessage, Reject); txMessage.addField(RefSeqNum).setInt(refSeqNum); txMessage.addField(SessionRejectReason).setInt(sessionRejectReason); txMessage.addField(Text).setString(text); send(txMessage); } /** * Send a Logout(5) message. * * @throws IOException if an I/O error occurs */ public void sendLogout() throws IOException { prepare(txMessage, Logout); send(txMessage); } /** * Send a Logout(5) message. * * @param text the Text(58) * @throws IOException if an I/O error occurs */ public void sendLogout(CharSequence text) throws IOException { prepare(txMessage, Logout); txMessage.addField(Text).setString(text); send(txMessage); } /** * Send a Logon(A) message. Set EncryptMethod(98) to 0 and HeartBtInt(108) * according to the connection configuration. 
* * @param resetSeqNum if true set ResetSeqNumFlag(141) to true, otherwise * omit ResetSeqNumFlag(141) * @throws IOException if an I/O error occurs */ public void sendLogon(boolean resetSeqNum) throws IOException { prepare(txMessage, Logon); txMessage.addField(EncryptMethod).setInt(0); txMessage.addField(HeartBtInt).setInt(config.getHeartBtInt()); if (resetSeqNum) txMessage.addField(ResetSeqNumFlag).setBoolean(true); send(txMessage); } private class MessageHandler implements FIXMessageListener { private FIXMessageListener downstream; MessageHandler(FIXMessageListener downstream) { this.downstream = downstream; } @Override public void message(FIXMessage message) throws IOException { long msgSeqNum = message.getMsgSeqNum(); if (msgSeqNum == 0) { msgSeqNumNotFound(); return; } FIXValue msgType = message.getMsgType(); if (msgType == null) { msgTypeNotFound(); return; } if (msgType.length() == 1 && msgType.byteAt(0) == SequenceReset) { if (handleSequenceReset(message)) return; } if (msgSeqNum != rxMsgSeqNum) { handleMsgSeqNum(message, msgType, msgSeqNum); return; } rxMsgSeqNum++; if (msgType.length() != 1) { downstream.message(message); return; } switch (msgType.byteAt(0)) { case Heartbeat: break; case TestRequest: handleTestRequest(message); break; case ResendRequest: handleResendRequest(message); break; case Reject: handleReject(message); break; case SequenceReset: handleSequenceReset(message); break; case Logout: handleLogout(message); break; case Logon: handleLogon(message); break; default: downstream.message(message); break; } } private void handleMsgSeqNum(FIXMessage message, FIXValue msgType, long msgSeqNum) throws IOException { if (msgSeqNum < rxMsgSeqNum) handleTooLowMsgSeqNum(message, msgType, msgSeqNum); else sendResendRequest(rxMsgSeqNum); } private void handleTooLowMsgSeqNum(FIXMessage message, FIXValue msgType, long msgSeqNum) throws IOException { if (msgType.length() != 1 || msgType.asChar() != SequenceReset) { FIXValue possDupFlag = message.valueOf(PossDupFlag); if (possDupFlag == null || !possDupFlag.asBoolean()) statusListener.tooLowMsgSeqNum(FIXConnection.this, msgSeqNum, rxMsgSeqNum); } } private void handleTestRequest(FIXMessage message) throws IOException { FIXValue testReqId = message.valueOf(TestReqID); if (testReqId == null) { sendReject(message.getMsgSeqNum(), RequiredTagMissing, "TestReqID(112) not found"); return; } sendHeartbeat(testReqId); } private void handleResendRequest(FIXMessage message) throws IOException { FIXValue beginSeqNo = message.valueOf(BeginSeqNo); if (beginSeqNo == null) { sendReject(message.getMsgSeqNum(), RequiredTagMissing, "BeginSeqNo(7) not found"); return; } FIXValue endSeqNo = message.valueOf(EndSeqNo); if (endSeqNo == null) { sendReject(message.getMsgSeqNum(), RequiredTagMissing, "EndSeqNo(16) not found"); return; } sendSequenceReset(beginSeqNo, endSeqNo.asInt() + 1); } private void handleReject(FIXMessage message) throws IOException { statusListener.reject(FIXConnection.this, message); } private boolean handleSequenceReset(FIXMessage message) throws IOException { FIXValue value = message.valueOf(NewSeqNo); if (value == null) { sendReject(message.getMsgSeqNum(), RequiredTagMissing, "NewSeqNo(36) not found"); return true; } long newSeqNo = value.asInt(); if (newSeqNo < rxMsgSeqNum) { sendReject(message.getMsgSeqNum(), ValueIsIncorrect, "NewSeqNo(36) too low"); return true; } rxMsgSeqNum = newSeqNo; FIXValue gapFillFlag = message.valueOf(GapFillFlag); boolean reset = gapFillFlag == null || !gapFillFlag.asBoolean(); if (reset) 
statusListener.sequenceReset(FIXConnection.this); return reset; } private void handleLogout(FIXMessage message) throws IOException { statusListener.logout(FIXConnection.this, message); } private void handleLogon(FIXMessage message) throws IOException { if (senderCompId.isEmpty()) { FIXValue value = message.valueOf(TargetCompID); if (value == null) { statusListener.close(FIXConnection.this, "SenderCompID(49) not found"); return; } senderCompId = value.asString(); } if (targetCompId.isEmpty()) { FIXValue value = message.valueOf(SenderCompID); if (value == null) { statusListener.close(FIXConnection.this, "TargetCompID(56) not found"); return; } targetCompId = value.asString(); } statusListener.logon(FIXConnection.this, message); } private void sendHeartbeat(FIXValue testReqId) throws IOException { prepare(txMessage, Heartbeat); txMessage.addField(TestReqID).set(testReqId); send(txMessage); } private void sendResendRequest(long beginSeqNo) throws IOException { prepare(txMessage, ResendRequest); txMessage.addField(BeginSeqNo).setInt(beginSeqNo); txMessage.addField(EndSeqNo).setInt(0); send(txMessage); } private void sendSequenceReset(FIXValue msgSeqNum, long newSeqNo) throws IOException { prepare(txMessage, SequenceReset); txMessage.valueOf(MsgSeqNum).set(msgSeqNum); txMessage.addField(GapFillFlag).setBoolean(true); txMessage.addField(NewSeqNo).setInt(newSeqNo); send(txMessage); } private void msgSeqNumNotFound() throws IOException { sendLogout("MsgSeqNum(34) not found"); } private void msgTypeNotFound() throws IOException { statusListener.close(FIXConnection.this, "MsgType(35) not found"); } } private void sendHeartbeat() throws IOException { prepare(txMessage, Heartbeat); send(txMessage); } private void sendTestRequest(CharSequence testReqId) throws IOException { prepare(txMessage, TestRequest); txMessage.addField(TestReqID).setString(testReqId); send(txMessage); } private static void tooLongMessage() throws FIXMessageOverflowException { throw new FIXMessageOverflowException("Too long message"); } }
libraries/core/src/main/java/com/paritytrading/philadelphia/FIXConnection.java
package com.paritytrading.philadelphia; import static com.paritytrading.philadelphia.FIX.*; import static com.paritytrading.philadelphia.FIXMsgTypes.*; import static com.paritytrading.philadelphia.FIXSessionRejectReasons.*; import static com.paritytrading.philadelphia.FIXTags.*; import java.io.Closeable; import java.io.IOException; import java.nio.ByteBuffer; import java.nio.channels.SocketChannel; import org.joda.time.DateTimeZone; import org.joda.time.MutableDateTime; /** * A connection. */ public class FIXConnection implements Closeable { private Clock clock; private SocketChannel channel; private FIXConfig config; private FIXValue bodyLength; private FIXValue checkSum; private String senderCompId; private String targetCompId; private long rxMsgSeqNum; private long txMsgSeqNum; private ByteBuffer rxBuffer; private ByteBuffer txHeaderBuffer; private int bodyLengthOffset; private ByteBuffer txBodyBuffer; private ByteBuffer[] txBuffers; /* * This variable is written on data reception and read on connection * keep-alive. These two functions can run on different threads * without locking. */ private volatile long lastRxMillis; /* * This variable is written on data transmission and read on connection * keep-alive. These two functions can run on different threads but * require locking. */ private long lastTxMillis; private long testRequestTxMillis; private final long heartbeatMillis; private final long testRequestMillis; private FIXMessageParser parser; private FIXConnectionStatusListener statusListener; private FIXMessage txMessage; private long currentTimeMillis; private MutableDateTime currentTime; private StringBuilder currentTimestamp; /** * Create a connection. The underlying socket channel can be either * blocking or non-blocking. * * @param clock the clock * @param channel the underlying socket channel * @param config the connection configuration * @param listener the inbound message listener * @param statusListener the inbound status event listener */ public FIXConnection(Clock clock, SocketChannel channel, FIXConfig config, FIXMessageListener listener, FIXConnectionStatusListener statusListener) { this.clock = clock; this.channel = channel; this.config = config; this.bodyLength = new FIXValue(BODY_LENGTH_FIELD_CAPACITY); this.checkSum = new FIXValue(CHECK_SUM_FIELD_CAPACITY); this.senderCompId = config.getSenderCompID(); this.targetCompId = config.getTargetCompID(); this.parser = new FIXMessageParser(config, new MessageHandler(listener)); this.statusListener = statusListener; this.rxMsgSeqNum = config.getIncomingMsgSeqNum(); this.txMsgSeqNum = config.getOutgoingMsgSeqNum(); this.rxBuffer = ByteBuffer.allocateDirect(config.getRxBufferCapacity()); this.txHeaderBuffer = ByteBuffer.allocateDirect(config.getTxBufferCapacity()); FIXValue beginString = new FIXValue(BEGIN_STRING_FIELD_CAPACITY); beginString.setString(config.getVersion().getBeginString()); this.txHeaderBuffer.put(BEGIN_STRING); beginString.put(this.txHeaderBuffer); this.txHeaderBuffer.put(BODY_LENGTH); this.bodyLengthOffset = this.txHeaderBuffer.position(); this.txBodyBuffer = ByteBuffer.allocateDirect(config.getTxBufferCapacity()); this.txBuffers = new ByteBuffer[2]; this.txBuffers[0] = txHeaderBuffer; this.txBuffers[1] = txBodyBuffer; this.lastRxMillis = clock.currentTimeMillis(); this.lastTxMillis = clock.currentTimeMillis(); this.heartbeatMillis = config.getHeartBtInt() * 1000; this.testRequestMillis = config.getHeartBtInt() * 1100; this.testRequestTxMillis = 0; this.txMessage = new FIXMessage(config.getMaxFieldCount(), 
config.getFieldCapacity()); this.currentTimeMillis = clock.currentTimeMillis(); this.currentTime = new MutableDateTime(this.currentTimeMillis, DateTimeZone.UTC); this.currentTimestamp = new StringBuilder(config.getFieldCapacity()); FIXTimestamps.append(this.currentTime, this.currentTimestamp); } /** * Create a connection. The underlying socket channel can be either * blocking or non-blocking. * * @param channel the underlying socket channel * @param config the connection configuration * @param listener the inbound message listener * @param statusListener the inbound status event listener */ public FIXConnection(SocketChannel channel, FIXConfig config, FIXMessageListener listener, FIXConnectionStatusListener statusListener) { this(System::currentTimeMillis, channel, config, listener, statusListener); } /** * Get the underlying socket channel. * * @return the underlying socket channel */ public SocketChannel getChannel() { return channel; } /** * Get the next incoming MsgSeqNum(34). * * @return the next incoming MsgSeqNum(34) */ public long getIncomingMsgSeqNum() { return rxMsgSeqNum; } void setIncomingMsgSeqNum(long incomingMsgSeqNum) { rxMsgSeqNum = incomingMsgSeqNum; } /** * Get the next outgoing MsgSeqNum(34). * * @return the next outgoing MsgSeqNum(34) */ public long getOutgoingMsgSeqNum() { return txMsgSeqNum; } /** * Get the SenderCompID(49). * * @return the SenderCompID(49) */ public String getSenderCompID() { return senderCompId; } /** * Get the TargetCompID(56). * * @return the TargetCompID(56) */ public String getTargetCompID() { return targetCompId; } /** * Create a message container. * * @return a message container */ public FIXMessage create() { return new FIXMessage(config.getMaxFieldCount(), config.getFieldCapacity()); } /** * <p>Prepare a message. When preparing a message, the following mandatory * fields are added:</p> * * <ul> * <li>MsgType(35)</li> * <li>SenderCompID(49)</li> * <li>TargetCompID(56)</li> * <li>MsgSeqNum(34)</li> * <li>SendingTime(52)</li> * </ul> * * @param message a message * @param msgType the MsgType(35) */ public void prepare(FIXMessage message, char msgType) { message.reset(); message.addField(MsgType).setChar(msgType); prepare(message); } /** * <p>Prepare a message.</p> * * @param message a message * @param msgType the MsgType(35) * @see #prepare(FIXMessage, char) */ public void prepare(FIXMessage message, CharSequence msgType) { message.reset(); message.addField(MsgType).setString(msgType); prepare(message); } private void prepare(FIXMessage message) { message.addField(SenderCompID).setString(senderCompId); message.addField(TargetCompID).setString(targetCompId); message.addField(MsgSeqNum).setInt(txMsgSeqNum); message.addField(SendingTime).setString(currentTimestamp); } /** * <p>Update a message. When updating a message, the following mandatory * fields are updated:</p> * * <ul> * <li>MsgSeqNum(34)</li> * <li>SendingTime(52)</li> * </ul> * * @param message a message * @throws NullPointerException if MsgSeqNum(34) or SendingTime(52) is * not found */ public void update(FIXMessage message) { message.valueOf(MsgSeqNum).setInt(txMsgSeqNum); message.valueOf(SendingTime).setString(currentTimestamp); } /** * Update SenderCompID(49) and TargetCompID(56). 
* * @param message a message * @throws NullPointerException if SenderCompID(49) or TargetCompID(56) * is not found */ public void updateCompID(FIXMessage message) { message.valueOf(SenderCompID).setString(senderCompId); message.valueOf(TargetCompID).setString(targetCompId); } /** * <p>Update the current timestamp. The current timestamp is used for the * following purposes:</p> * * <ul> * <li>SendingTime(52)</li> * <li>the connection keep-alive mechanism</li> * </ul> */ public void updateCurrentTimestamp() { currentTimeMillis = clock.currentTimeMillis(); currentTime.setMillis(currentTimeMillis); currentTimestamp.setLength(0); FIXTimestamps.append(currentTime, currentTimestamp); } /** * Get the current timestamp. * * @return the current timestamp */ public CharSequence getCurrentTimestamp() { return currentTimestamp; } /** * Keep this connection alive. * * <p>If the duration indicated by HeartBtInt(108) has passed since * sending a message, send a Heartbeat(0) message.</p> * * <p>If the duration indicated by HeartBtInt(108) amended with a * reasonable transmission time has passed since receiving a message, * send a TestRequest(1) message.</p> * * <p>If a TestRequest(1) message has been sent and no data has been * received within the duration indicated by HeartBtInt(108) amended with * a reasonable transmission time, trigger a status event indicating * heartbeat timeout.</p> * * @throws IOException if an I/O error occurs */ public void keepAlive() throws IOException { if (currentTimeMillis - lastTxMillis > heartbeatMillis) sendHeartbeat(); if (testRequestTxMillis == 0) { if (currentTimeMillis - lastRxMillis > testRequestMillis) { sendTestRequest(currentTimestamp); testRequestTxMillis = currentTimeMillis; } } else { if (currentTimeMillis - testRequestTxMillis > testRequestMillis) { statusListener.heartbeatTimeout(this); testRequestTxMillis = 0; } } } /** * Close the underlying socket channel. * * @throws IOException if an I/O error occurs */ @Override public void close() throws IOException { channel.close(); } /** * Receive data from the underlying socket channel. For each message * received, invoke the message listener if applicable. * * @return the number of bytes read, possibly zero, or -1 if the channel * has reached end-of-stream * @throws IOException if an I/O error occurs */ public int receive() throws IOException { int bytes = channel.read(rxBuffer); if (bytes <= 0) return bytes; rxBuffer.flip(); while (parser.parse(rxBuffer)); rxBuffer.compact(); if (rxBuffer.position() == rxBuffer.capacity()) tooLongMessage(); lastRxMillis = currentTimeMillis; testRequestTxMillis = 0; return bytes; } /** * Send a message. * * @param message a message * @throws IOException if an I/O error occurs */ public void send(FIXMessage message) throws IOException { txBodyBuffer.clear(); message.put(txBodyBuffer); bodyLength.setInt(txBodyBuffer.position()); txHeaderBuffer.position(bodyLengthOffset); bodyLength.put(txHeaderBuffer); checkSum.setCheckSum(FIXCheckSums.sum(txHeaderBuffer, 0, txHeaderBuffer.position()) + FIXCheckSums.sum(txBodyBuffer, 0, txBodyBuffer.position())); txBodyBuffer.put(CHECK_SUM); checkSum.put(txBodyBuffer); txHeaderBuffer.flip(); txBodyBuffer.flip(); int remaining = txHeaderBuffer.remaining() + txBodyBuffer.remaining(); do { remaining -= channel.write(txBuffers, 0, txBuffers.length); } while (remaining > 0); txMsgSeqNum++; lastTxMillis = currentTimeMillis; } /** * Send a Reject(3) message. 
* * @param refSeqNum the RefSeqNum(45) * @param sessionRejectReason the SessionRejectReason(373) * @param text the Text(58) * @throws IOException if an I/O error occurs */ public void sendReject(long refSeqNum, long sessionRejectReason, CharSequence text) throws IOException { prepare(txMessage, Reject); txMessage.addField(RefSeqNum).setInt(refSeqNum); txMessage.addField(SessionRejectReason).setInt(sessionRejectReason); txMessage.addField(Text).setString(text); send(txMessage); } /** * Send a Logout(5) message. * * @throws IOException if an I/O error occurs */ public void sendLogout() throws IOException { prepare(txMessage, Logout); send(txMessage); } /** * Send a Logout(5) message. * * @param text the Text(58) * @throws IOException if an I/O error occurs */ public void sendLogout(CharSequence text) throws IOException { prepare(txMessage, Logout); txMessage.addField(Text).setString(text); send(txMessage); } /** * Send a Logon(A) message. Set EncryptMethod(98) to 0 and HeartBtInt(108) * according to the connection configuration. * * @param resetSeqNum if true set ResetSeqNumFlag(141) to true, otherwise * omit ResetSeqNumFlag(141) * @throws IOException if an I/O error occurs */ public void sendLogon(boolean resetSeqNum) throws IOException { prepare(txMessage, Logon); txMessage.addField(EncryptMethod).setInt(0); txMessage.addField(HeartBtInt).setInt(config.getHeartBtInt()); if (resetSeqNum) txMessage.addField(ResetSeqNumFlag).setBoolean(true); send(txMessage); } private class MessageHandler implements FIXMessageListener { private FIXMessageListener downstream; MessageHandler(FIXMessageListener downstream) { this.downstream = downstream; } @Override public void message(FIXMessage message) throws IOException { long msgSeqNum = message.getMsgSeqNum(); if (msgSeqNum == 0) { msgSeqNumNotFound(); return; } FIXValue msgType = message.getMsgType(); if (msgType == null) { msgTypeNotFound(); return; } if (msgType.length() == 1 && msgType.byteAt(0) == SequenceReset) { if (handleSequenceReset(message)) return; } if (msgSeqNum != rxMsgSeqNum) { handleMsgSeqNum(message, msgType, msgSeqNum); return; } rxMsgSeqNum++; if (msgType.length() != 1) { downstream.message(message); return; } switch (msgType.byteAt(0)) { case Heartbeat: break; case TestRequest: handleTestRequest(message); break; case ResendRequest: handleResendRequest(message); break; case Reject: handleReject(message); break; case SequenceReset: handleSequenceReset(message); break; case Logout: handleLogout(message); break; case Logon: handleLogon(message); break; default: downstream.message(message); break; } } private void handleMsgSeqNum(FIXMessage message, FIXValue msgType, long msgSeqNum) throws IOException { if (msgSeqNum < rxMsgSeqNum) handleTooLowMsgSeqNum(message, msgType, msgSeqNum); else sendResendRequest(rxMsgSeqNum); } private void handleTooLowMsgSeqNum(FIXMessage message, FIXValue msgType, long msgSeqNum) throws IOException { if (msgType.length() != 1 || msgType.asChar() != SequenceReset) { FIXValue possDupFlag = message.valueOf(PossDupFlag); if (possDupFlag == null || !possDupFlag.asBoolean()) statusListener.tooLowMsgSeqNum(FIXConnection.this, msgSeqNum, rxMsgSeqNum); } } private void handleTestRequest(FIXMessage message) throws IOException { FIXValue testReqId = message.valueOf(TestReqID); if (testReqId == null) { sendReject(message.getMsgSeqNum(), RequiredTagMissing, "TestReqID(112) not found"); return; } sendHeartbeat(testReqId); } private void handleResendRequest(FIXMessage message) throws IOException { FIXValue beginSeqNo = 
message.valueOf(BeginSeqNo); if (beginSeqNo == null) { sendReject(message.getMsgSeqNum(), RequiredTagMissing, "BeginSeqNo(7) not found"); return; } FIXValue endSeqNo = message.valueOf(EndSeqNo); if (endSeqNo == null) { sendReject(message.getMsgSeqNum(), RequiredTagMissing, "EndSeqNo(16) not found"); return; } sendSequenceReset(beginSeqNo, endSeqNo.asInt() + 1); } private void handleReject(FIXMessage message) throws IOException { statusListener.reject(FIXConnection.this, message); } private boolean handleSequenceReset(FIXMessage message) throws IOException { FIXValue value = message.valueOf(NewSeqNo); if (value == null) { sendReject(message.getMsgSeqNum(), RequiredTagMissing, "NewSeqNo(36) not found"); return true; } long newSeqNo = value.asInt(); if (newSeqNo < rxMsgSeqNum) { sendReject(message.getMsgSeqNum(), ValueIsIncorrect, "NewSeqNo(36) too low"); return true; } rxMsgSeqNum = newSeqNo; FIXValue gapFillFlag = message.valueOf(GapFillFlag); boolean reset = gapFillFlag == null || !gapFillFlag.asBoolean(); if (reset) statusListener.sequenceReset(FIXConnection.this); return reset; } private void handleLogout(FIXMessage message) throws IOException { statusListener.logout(FIXConnection.this, message); } private void handleLogon(FIXMessage message) throws IOException { if (senderCompId.isEmpty()) { FIXValue value = message.valueOf(TargetCompID); if (value == null) { statusListener.close(FIXConnection.this, "SenderCompID(49) not found"); return; } senderCompId = value.asString(); } if (targetCompId.isEmpty()) { FIXValue value = message.valueOf(SenderCompID); if (value == null) { statusListener.close(FIXConnection.this, "TargetCompID(56) not found"); return; } targetCompId = value.asString(); } statusListener.logon(FIXConnection.this, message); } private void sendHeartbeat(FIXValue testReqId) throws IOException { prepare(txMessage, Heartbeat); txMessage.addField(TestReqID).set(testReqId); send(txMessage); } private void sendResendRequest(long beginSeqNo) throws IOException { prepare(txMessage, ResendRequest); txMessage.addField(BeginSeqNo).setInt(beginSeqNo); txMessage.addField(EndSeqNo).setInt(0); send(txMessage); } private void sendSequenceReset(FIXValue msgSeqNum, long newSeqNo) throws IOException { prepare(txMessage, SequenceReset); txMessage.valueOf(MsgSeqNum).set(msgSeqNum); txMessage.addField(GapFillFlag).setBoolean(true); txMessage.addField(NewSeqNo).setInt(newSeqNo); send(txMessage); } private void msgSeqNumNotFound() throws IOException { sendLogout("MsgSeqNum(34) not found"); } private void msgTypeNotFound() throws IOException { statusListener.close(FIXConnection.this, "MsgType(35) not found"); } } private void sendHeartbeat() throws IOException { prepare(txMessage, Heartbeat); send(txMessage); } private void sendTestRequest(CharSequence testReqId) throws IOException { prepare(txMessage, TestRequest); txMessage.addField(TestReqID).setString(testReqId); send(txMessage); } private static void tooLongMessage() throws FIXMessageOverflowException { throw new FIXMessageOverflowException("Too long message"); } }
philadelphia-core: Use 'final' modifier in 'FIXConnection'
libraries/core/src/main/java/com/paritytrading/philadelphia/FIXConnection.java
philadelphia-core: Use 'final' modifier in 'FIXConnection'
<ide><path>ibraries/core/src/main/java/com/paritytrading/philadelphia/FIXConnection.java <ide> */ <ide> public class FIXConnection implements Closeable { <ide> <del> private Clock clock; <del> <del> private SocketChannel channel; <del> <del> private FIXConfig config; <del> <del> private FIXValue bodyLength; <del> private FIXValue checkSum; <add> private final Clock clock; <add> <add> private final SocketChannel channel; <add> <add> private final FIXConfig config; <add> <add> private final FIXValue bodyLength; <add> private final FIXValue checkSum; <ide> <ide> private String senderCompId; <ide> private String targetCompId; <ide> private long rxMsgSeqNum; <ide> private long txMsgSeqNum; <ide> <del> private ByteBuffer rxBuffer; <del> <del> private ByteBuffer txHeaderBuffer; <del> <del> private int bodyLengthOffset; <del> <del> private ByteBuffer txBodyBuffer; <del> <del> private ByteBuffer[] txBuffers; <add> private final ByteBuffer rxBuffer; <add> <add> private final ByteBuffer txHeaderBuffer; <add> <add> private final int bodyLengthOffset; <add> <add> private final ByteBuffer txBodyBuffer; <add> <add> private final ByteBuffer[] txBuffers; <ide> <ide> /* <ide> * This variable is written on data reception and read on connection <ide> <ide> private final long testRequestMillis; <ide> <del> private FIXMessageParser parser; <del> <del> private FIXConnectionStatusListener statusListener; <del> <del> private FIXMessage txMessage; <add> private final FIXMessageParser parser; <add> <add> private final FIXConnectionStatusListener statusListener; <add> <add> private final FIXMessage txMessage; <ide> <ide> private long currentTimeMillis; <ide> <del> private MutableDateTime currentTime; <del> <del> private StringBuilder currentTimestamp; <add> private final MutableDateTime currentTime; <add> <add> private final StringBuilder currentTimestamp; <ide> <ide> /** <ide> * Create a connection. The underlying socket channel can be either
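As a usage aside (not part of the record): a minimal polling sketch built only from methods visible in the FIXConnection source above — updateCurrentTimestamp, receive and keepAlive. The connection is assumed to have been constructed elsewhere, and the loop busy-polls for brevity.

import java.io.IOException;
import com.paritytrading.philadelphia.FIXConnection;

final class ConnectionLoopSketch {

    // Drives one already-constructed FIXConnection until the peer closes the channel.
    static void run(FIXConnection connection) throws IOException {
        while (true) {
            connection.updateCurrentTimestamp(); // refreshes SendingTime(52) and the keep-alive clock
            int bytes = connection.receive();    // parses inbound messages, dispatching them to the listener
            if (bytes < 0) {
                break;                           // channel reached end-of-stream
            }
            connection.keepAlive();              // sends Heartbeat(0)/TestRequest(1) when the intervals elapse
        }
    }
}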
Java
apache-2.0
9324e6883d110b473cb7ee696f1034286c18f1c8
0
bbrodt/uberfire-eclipse,bbrodt/uberfire-eclipse,bbrodt/uberfire-eclipse
package org.uberfire.eclipse.browser.shadowservices.impl; import java.io.File; import java.lang.reflect.Field; import java.net.URI; import java.net.URISyntaxException; import java.util.ArrayList; import java.util.Collection; import java.util.List; import org.drools.compiler.compiler.BaseKnowledgeBuilderResultImpl; import org.drools.compiler.lang.descr.ImportDescr; import org.drools.compiler.lang.descr.PackageDescr; import org.drools.eclipse.DRLInfo; import org.drools.eclipse.DroolsEclipsePlugin; import org.drools.workbench.models.datamodel.rule.DSLSentence; import org.drools.workbench.screens.drltext.model.DrlModelContent; import org.drools.workbench.screens.drltext.service.DRLTextEditorService; import org.eclipse.core.resources.IFile; import org.eclipse.core.runtime.CoreException; import org.eclipse.jgit.lib.Repository; import org.eclipse.jgit.storage.file.FileRepositoryBuilder; import org.eclipse.swt.browser.Browser; import org.guvnor.common.services.backend.metadata.MetadataServiceImpl; import org.guvnor.common.services.shared.message.Level; import org.guvnor.common.services.shared.metadata.model.Metadata; import org.guvnor.common.services.shared.metadata.model.Overview; import org.guvnor.common.services.shared.validation.model.ValidationMessage; import org.jboss.errai.security.shared.api.Group; import org.jboss.errai.security.shared.api.Role; import org.jboss.errai.security.shared.api.identity.User; import org.jboss.errai.security.shared.api.identity.UserImpl; import org.kie.internal.builder.KnowledgeBuilderResult; import org.kie.internal.builder.ResultSeverity; import org.uberfire.backend.server.io.ConfigIOServiceProducer; import org.uberfire.backend.vfs.Path; import org.uberfire.commons.lifecycle.PriorityDisposableRegistry; import org.uberfire.eclipse.browser.FileUtils; import org.uberfire.eclipse.browser.JavaProjectClassLoader; import org.uberfire.eclipse.browser.shadowservices.EclipseShadowService; import org.uberfire.io.IOService; import org.uberfire.io.impl.IOServiceDotFileImpl; import org.uberfire.java.nio.file.FileSystem; import org.uberfire.java.nio.file.api.FileSystemProviders; import org.uberfire.java.nio.file.spi.FileSystemProvider; import org.uberfire.rpc.SessionInfo; import org.uberfire.rpc.impl.SessionInfoImpl; /** * Service-side Shadow Service implementation of the DRLTextEditorService. 
* * @author bbrodt * */ public class EclipseDRLTextEditorService extends EclipseShadowService implements DRLTextEditorService { public static final String NAME = "EclipseDRLTextEditorService"; FileSystemProvider fsProvider = null; FileSystem fs = null; IOService ioService; IOService configIOService; SessionInfo sessionInfo; MetadataServiceImpl metadataService; public EclipseDRLTextEditorService(Browser browser) { super(browser, NAME); } @Override public List<ValidationMessage> validate(Path path, String content) { List<ValidationMessage> errors = new ArrayList<ValidationMessage>(); IFile file = FileUtils.getFile(path.toURI()); try { DRLInfo info = DroolsEclipsePlugin.getDefault().generateParsedResource(content, file, false, true); for (BaseKnowledgeBuilderResultImpl r : info.getParserErrors()) { ValidationMessage m = new ValidationMessage(); switch (r.getSeverity()) { case ERROR: m.setLevel(Level.ERROR); break; case INFO: m.setLevel(Level.INFO); break; case WARNING: m.setLevel(Level.WARNING); break; default: break; } m.setText(r.getMessage()); errors.add(m); } } catch (Exception e) { e.printStackTrace(); } return errors; } @Override public Path create(Path context, String fileName, String content, String comment) { // TODO Auto-generated method stub return null; } @Override public String load(Path path) { // TODO Auto-generated method stub return null; } @Override public Path save(Path path, String content, Metadata metadata, String comment) { IFile file = FileUtils.getFile(path.toURI()); if (FileUtils.write(file, content) < 0 ) return null; return path; } @Override public void delete(Path path, String comment) { // TODO Auto-generated method stub } @Override public Path copy(Path path, String newName, String comment) { // TODO Auto-generated method stub return null; } @Override public Path copy(Path path, String newName, Path targetDirectory, String comment) { // TODO Auto-generated method stub return null; } @Override public Path rename(Path path, String newName, String comment) { // TODO Auto-generated method stub return null; } public URI pathToURI(Path path) { URI uri = null; try { Repository repository = getRepository(path); if (repository==null) { uri = new URI(path.toURI()); } else { // String dir = repository.getDirectory().toURI().toString().replaceAll("file:", ""); // uri = new URI("git", "localhost", path.getFileName(), null); uri = new URI(path.toURI().replace("file:", "git:")); } } catch (Exception e) { try { uri = new URI(path.toURI()); } catch (URISyntaxException e1) { // TODO Auto-generated catch block e1.printStackTrace(); } e.printStackTrace(); } return uri; } public static Repository getRepository(Path path) { Repository repository = null; try{ File f = new java.io.File(new URI(path.toURI())); FileRepositoryBuilder builder = new FileRepositoryBuilder(); repository = builder.findGitDir(f).build(); } catch (Exception e){ e.printStackTrace(); } return repository; } private Metadata getMetadata(Path path) { URI uri = pathToURI(path); if (metadataService==null) { try { if (fsProvider == null) { // force loading of FS providers first time fsProvider = FileSystemProviders.resolveProvider(uri); } try { fs = fsProvider.getFileSystem(uri); } catch (Exception e) { e.printStackTrace(); } if (fs==null) { try { fs = fsProvider.newFileSystem(uri, new java.util.HashMap<String,Object>()); } catch (Exception e) { // TODO Auto-generated catch block e.printStackTrace(); } } // KieServices kieServices = KieServices.Factory.get(); // KieFileSystem kfs = new KieFileSystemImpl(); // 
KieBuilderImpl kieBuilder = (KieBuilderImpl) kieServices.newKieBuilder( kfs ); // kieBuilder.createFileSet(path.toURI().replace("file:", "")); // Results r = kieBuilder.buildAll().getResults(); PriorityDisposableRegistry.register("systemFS", fs); ioService = new IOServiceDotFileImpl(); ConfigIOServiceProducer cfiosProducer = new ConfigIOServiceProducer(); cfiosProducer.setup(); configIOService = cfiosProducer.configIOService(); Collection<Role> roles = new ArrayList<Role>(); Collection<Group> groups = new ArrayList<Group>(); User user = new UserImpl("bbrodt", roles, groups); sessionInfo = new SessionInfoImpl("bbrodt", user); metadataService = new MetadataServiceImpl(ioService, configIOService, sessionInfo); } catch (Exception e1) { // TODO Auto-generated catch block e1.printStackTrace(); } } org.uberfire.java.nio.file.Path aPath = fsProvider.getPath(uri); return metadataService.getMetadata(aPath); } @Override public DrlModelContent loadContent(Path path) { IFile file = FileUtils.getFile(path.toURI()); Overview overview = new Overview(); Metadata metadata = getMetadata(path); overview.setMetadata(metadata); overview.setProjectName(file.getProject().getName()); String fileContent = ""; try { fileContent = FileUtils.read(file); } catch (CoreException e) { e.printStackTrace(); } List<String> fullyQualifiedClassNames = new ArrayList<String>(); List<DSLSentence> dslConditions = new ArrayList<DSLSentence>(); List<DSLSentence> dslActions = new ArrayList<DSLSentence>(); try { DRLInfo info = DroolsEclipsePlugin.getDefault().parseResource(file, true); PackageDescr pd = info.getPackageDescr(); for (ImportDescr id : pd.getImports()) { fullyQualifiedClassNames.add(id.getTarget()); } } catch (Exception e1) { // TODO Auto-generated catch block e1.printStackTrace(); } DrlModelContent content = new DrlModelContent( fileContent, overview, fullyQualifiedClassNames, dslConditions, dslActions ); return content; } @Override public List<String> loadClassFields(Path path, String fullyQualifiedClassName) { // TODO Auto-generated method stub List<String> fields = new ArrayList<String>(); try { IFile file = FileUtils.getFile(path.toURI()); JavaProjectClassLoader cl = new JavaProjectClassLoader(file.getProject()); Class clazz = cl.loadClass(fullyQualifiedClassName); for (Field f : clazz.getDeclaredFields()) { fields.add(f.getName()); } } catch (Exception e) { // TODO Auto-generated catch block e.printStackTrace(); } return fields; } @Override public String assertPackageName(String drl, Path resource) { // TODO Auto-generated method stub return null; } }
eclipse-plugins/org.uberfire.eclipse.browser/src/main/java/org/uberfire/eclipse/browser/shadowservices/impl/EclipseDRLTextEditorService.java
package org.uberfire.eclipse.browser.shadowservices.impl; import java.io.File; import java.lang.reflect.Field; import java.net.URI; import java.net.URISyntaxException; import java.util.ArrayList; import java.util.Collection; import java.util.List; import org.drools.compiler.lang.descr.ImportDescr; import org.drools.compiler.lang.descr.PackageDescr; import org.drools.eclipse.DRLInfo; import org.drools.eclipse.DroolsEclipsePlugin; import org.drools.workbench.models.datamodel.rule.DSLSentence; import org.drools.workbench.screens.drltext.model.DrlModelContent; import org.drools.workbench.screens.drltext.service.DRLTextEditorService; import org.eclipse.core.resources.IFile; import org.eclipse.core.runtime.CoreException; import org.eclipse.jgit.lib.Repository; import org.eclipse.jgit.storage.file.FileRepositoryBuilder; import org.eclipse.swt.browser.Browser; import org.guvnor.common.services.backend.metadata.MetadataServiceImpl; import org.guvnor.common.services.shared.metadata.model.Metadata; import org.guvnor.common.services.shared.metadata.model.Overview; import org.guvnor.common.services.shared.validation.model.ValidationMessage; import org.jboss.errai.security.shared.api.Group; import org.jboss.errai.security.shared.api.Role; import org.jboss.errai.security.shared.api.identity.User; import org.jboss.errai.security.shared.api.identity.UserImpl; import org.uberfire.backend.server.io.ConfigIOServiceProducer; import org.uberfire.backend.vfs.Path; import org.uberfire.commons.lifecycle.PriorityDisposableRegistry; import org.uberfire.eclipse.browser.FileUtils; import org.uberfire.eclipse.browser.JavaProjectClassLoader; import org.uberfire.eclipse.browser.shadowservices.EclipseShadowService; import org.uberfire.io.IOService; import org.uberfire.io.impl.IOServiceDotFileImpl; import org.uberfire.java.nio.file.FileSystem; import org.uberfire.java.nio.file.api.FileSystemProviders; import org.uberfire.java.nio.file.spi.FileSystemProvider; import org.uberfire.rpc.SessionInfo; import org.uberfire.rpc.impl.SessionInfoImpl; /** * Service-side Shadow Service implementation of the DRLTextEditorService. 
* * @author bbrodt * */ public class EclipseDRLTextEditorService extends EclipseShadowService implements DRLTextEditorService { public static final String NAME = "EclipseDRLTextEditorService"; FileSystemProvider fsProvider = null; FileSystem fs = null; IOService ioService; IOService configIOService; SessionInfo sessionInfo; MetadataServiceImpl metadataService; // private DataModelService dataModelService; public EclipseDRLTextEditorService(Browser browser) { super(browser, NAME); } @Override public List<ValidationMessage> validate(Path path, String content) { // TODO Auto-generated method stub return null; } @Override public Path create(Path context, String fileName, String content, String comment) { // TODO Auto-generated method stub return null; } @Override public String load(Path path) { // TODO Auto-generated method stub return null; } @Override public Path save(Path path, String content, Metadata metadata, String comment) { IFile file = FileUtils.getFile(path.toURI()); if (FileUtils.write(file, content) < 0 ) return null; return path; } @Override public void delete(Path path, String comment) { // TODO Auto-generated method stub } @Override public Path copy(Path path, String newName, String comment) { // TODO Auto-generated method stub return null; } @Override public Path copy(Path path, String newName, Path targetDirectory, String comment) { // TODO Auto-generated method stub return null; } @Override public Path rename(Path path, String newName, String comment) { // TODO Auto-generated method stub return null; } public URI pathToURI(Path path) { URI uri = null; try { Repository repository = getRepository(path); if (repository==null) { uri = new URI(path.toURI()); } else { // String dir = repository.getDirectory().toURI().toString().replaceAll("file:", ""); // uri = new URI("git", "localhost", path.getFileName(), null); uri = new URI(path.toURI().replace("file:", "git:")); } } catch (Exception e) { try { uri = new URI(path.toURI()); } catch (URISyntaxException e1) { // TODO Auto-generated catch block e1.printStackTrace(); } e.printStackTrace(); } return uri; } public static Repository getRepository(Path path) { Repository repository = null; try{ File f = new java.io.File(new URI(path.toURI())); FileRepositoryBuilder builder = new FileRepositoryBuilder(); repository = builder.findGitDir(f).build(); } catch (Exception e){ e.printStackTrace(); } return repository; } private Metadata getMetadata(Path path) { URI uri = pathToURI(path); if (metadataService==null) { try { if (fsProvider == null) { // force loading of FS providers first time fsProvider = FileSystemProviders.resolveProvider(uri); } try { fs = fsProvider.getFileSystem(uri); } catch (Exception e) { e.printStackTrace(); } if (fs==null) { try { fs = fsProvider.newFileSystem(uri, new java.util.HashMap<String,Object>()); } catch (Exception e) { // TODO Auto-generated catch block e.printStackTrace(); } } // KieServices kieServices = KieServices.Factory.get(); // KieFileSystem kfs = new KieFileSystemImpl(); // KieBuilderImpl kieBuilder = (KieBuilderImpl) kieServices.newKieBuilder( kfs ); // kieBuilder.createFileSet(path.toURI().replace("file:", "")); // Results r = kieBuilder.buildAll().getResults(); PriorityDisposableRegistry.register("systemFS", fs); ioService = new IOServiceDotFileImpl(); ConfigIOServiceProducer cfiosProducer = new ConfigIOServiceProducer(); cfiosProducer.setup(); configIOService = cfiosProducer.configIOService(); Collection<Role> roles = new ArrayList<Role>(); Collection<Group> groups = new ArrayList<Group>(); User 
user = new UserImpl("bbrodt", roles, groups); sessionInfo = new SessionInfoImpl("bbrodt", user); metadataService = new MetadataServiceImpl(ioService, configIOService, sessionInfo); } catch (Exception e1) { // TODO Auto-generated catch block e1.printStackTrace(); } } org.uberfire.java.nio.file.Path aPath = fsProvider.getPath(uri); return metadataService.getMetadata(aPath); } @Override public DrlModelContent loadContent(Path path) { IFile file = FileUtils.getFile(path.toURI()); Overview overview = new Overview(); Metadata metadata = getMetadata(path); overview.setMetadata(metadata); overview.setProjectName(file.getProject().getName()); String fileContent = ""; try { fileContent = FileUtils.read(file); } catch (CoreException e) { e.printStackTrace(); } List<String> fullyQualifiedClassNames = new ArrayList<String>(); List<DSLSentence> dslConditions = new ArrayList<DSLSentence>(); List<DSLSentence> dslActions = new ArrayList<DSLSentence>(); try { DRLInfo info = DroolsEclipsePlugin.getDefault().parseResource(file, true); PackageDescr pd = info.getPackageDescr(); for (ImportDescr id : pd.getImports()) { fullyQualifiedClassNames.add(id.getTarget()); } } catch (Exception e1) { // TODO Auto-generated catch block e1.printStackTrace(); } DrlModelContent content = new DrlModelContent( fileContent, overview, fullyQualifiedClassNames, dslConditions, dslActions ); return content; } @Override public List<String> loadClassFields(Path path, String fullyQualifiedClassName) { // TODO Auto-generated method stub List<String> fields = new ArrayList<String>(); try { IFile file = FileUtils.getFile(path.toURI()); JavaProjectClassLoader cl = new JavaProjectClassLoader(file.getProject()); Class clazz = cl.loadClass(fullyQualifiedClassName); for (Field f : clazz.getDeclaredFields()) { fields.add(f.getName()); } } catch (Exception e) { // TODO Auto-generated catch block e.printStackTrace(); } return fields; } @Override public String assertPackageName(String drl, Path resource) { // TODO Auto-generated method stub return null; } }
Implemented validation

The following additional packages need to be exported from droolsjbpm-tools org.drools.eclipse plugin:

org.drools.compiler.compiler
org.drools.compiler.lang.descr
org.drools.eclipse
org.kie.internal.builder
eclipse-plugins/org.uberfire.eclipse.browser/src/main/java/org/uberfire/eclipse/browser/shadowservices/impl/EclipseDRLTextEditorService.java
Implemented validation
<ide><path>clipse-plugins/org.uberfire.eclipse.browser/src/main/java/org/uberfire/eclipse/browser/shadowservices/impl/EclipseDRLTextEditorService.java <ide> import java.util.Collection; <ide> import java.util.List; <ide> <add>import org.drools.compiler.compiler.BaseKnowledgeBuilderResultImpl; <ide> import org.drools.compiler.lang.descr.ImportDescr; <ide> import org.drools.compiler.lang.descr.PackageDescr; <ide> import org.drools.eclipse.DRLInfo; <ide> import org.eclipse.jgit.storage.file.FileRepositoryBuilder; <ide> import org.eclipse.swt.browser.Browser; <ide> import org.guvnor.common.services.backend.metadata.MetadataServiceImpl; <add>import org.guvnor.common.services.shared.message.Level; <ide> import org.guvnor.common.services.shared.metadata.model.Metadata; <ide> import org.guvnor.common.services.shared.metadata.model.Overview; <ide> import org.guvnor.common.services.shared.validation.model.ValidationMessage; <ide> import org.jboss.errai.security.shared.api.Role; <ide> import org.jboss.errai.security.shared.api.identity.User; <ide> import org.jboss.errai.security.shared.api.identity.UserImpl; <add>import org.kie.internal.builder.KnowledgeBuilderResult; <add>import org.kie.internal.builder.ResultSeverity; <ide> import org.uberfire.backend.server.io.ConfigIOServiceProducer; <ide> import org.uberfire.backend.vfs.Path; <ide> import org.uberfire.commons.lifecycle.PriorityDisposableRegistry; <ide> IOService configIOService; <ide> SessionInfo sessionInfo; <ide> MetadataServiceImpl metadataService; <del>// private DataModelService dataModelService; <ide> <ide> public EclipseDRLTextEditorService(Browser browser) { <ide> super(browser, NAME); <ide> <ide> @Override <ide> public List<ValidationMessage> validate(Path path, String content) { <del> // TODO Auto-generated method stub <del> return null; <add> List<ValidationMessage> errors = new ArrayList<ValidationMessage>(); <add> IFile file = FileUtils.getFile(path.toURI()); <add> try { <add> DRLInfo info = DroolsEclipsePlugin.getDefault().generateParsedResource(content, file, false, true); <add> for (BaseKnowledgeBuilderResultImpl r : info.getParserErrors()) { <add> ValidationMessage m = new ValidationMessage(); <add> switch (r.getSeverity()) { <add> case ERROR: <add> m.setLevel(Level.ERROR); <add> break; <add> case INFO: <add> m.setLevel(Level.INFO); <add> break; <add> case WARNING: <add> m.setLevel(Level.WARNING); <add> break; <add> default: <add> break; <add> <add> } <add> m.setText(r.getMessage()); <add> errors.add(m); <add> } <add> } catch (Exception e) { <add> e.printStackTrace(); <add> } <add> return errors; <ide> } <ide> <ide> @Override
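As a side note, a hypothetical caller of the validate(Path, String) service implemented in this record might surface the returned messages along these lines. This is only a sketch: the helper class, the service/path/source arguments, and the getLevel()/getText() getters (assumed counterparts of the setLevel()/setText() calls used above) are not part of the record.

package example; // hypothetical helper, not part of the record

import java.util.List;

import org.guvnor.common.services.shared.validation.model.ValidationMessage;
import org.uberfire.backend.vfs.Path;
import org.uberfire.eclipse.browser.shadowservices.impl.EclipseDRLTextEditorService;

public class DrlValidationHelper {

    // Runs validation through the shadow service and prints each message.
    // getLevel()/getText() are assumed getters matching the setLevel()/setText()
    // calls made in EclipseDRLTextEditorService.validate().
    public static void report(EclipseDRLTextEditorService service, Path drlPath, String drlSource) {
        List<ValidationMessage> messages = service.validate(drlPath, drlSource);
        for (ValidationMessage message : messages) {
            System.out.println(message.getLevel() + ": " + message.getText());
        }
    }
}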
Java
isc
688869dcf6b8ddda601e24136cbf021c227f5183
0
j256/ormlite-jdbc
package com.j256.ormlite.jdbc; import java.io.IOException; import java.sql.SQLException; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import com.j256.ormlite.db.DatabaseType; import com.j256.ormlite.logger.Log.Level; import com.j256.ormlite.logger.Logger; import com.j256.ormlite.logger.LoggerFactory; import com.j256.ormlite.misc.IOUtils; import com.j256.ormlite.support.ConnectionSource; import com.j256.ormlite.support.DatabaseConnection; /** * Implementation of the ConnectionSource interface that supports basic pooled connections. New connections are created * on demand only if there are no dormant connections otherwise released connections will be reused. This class is * reentrant and can handle requests from multiple threads. * * <p> * <b> NOTE: </b> If you are using the Spring type wiring in Java, {@link #initialize} should be called after all of the * set methods. In Spring XML, init-method="initialize" should be used. * </p> * * <p> * <b> NOTE: </b> This class spawns a thread to test the pooled connections that are in the free-list as a keep-alive * mechanism. It will test any dormant connections every so often to see if they are still valid. If this is not the * behavior that you want then call {@link #setCheckConnectionsEveryMillis(long)} with 0 to disable the thread. You can * also call {@link #setTestBeforeGet(boolean)} and set it to true to test the connection before it is handed back to * you. * </p> * * @author graywatson */ public class JdbcPooledConnectionSource extends JdbcConnectionSource implements ConnectionSource { private static Logger logger = LoggerFactory.getLogger(JdbcPooledConnectionSource.class); private final static int DEFAULT_MAX_CONNECTIONS_FREE = 5; // maximum age that a connection can be before being closed private final static int DEFAULT_MAX_CONNECTION_AGE_MILLIS = 60 * 60 * 1000; private final static int CHECK_CONNECTIONS_EVERY_MILLIS = 30 * 1000; private int maxConnectionsFree = DEFAULT_MAX_CONNECTIONS_FREE; private long maxConnectionAgeMillis = DEFAULT_MAX_CONNECTION_AGE_MILLIS; private List<ConnectionMetaData> connFreeList = new ArrayList<ConnectionMetaData>(); protected final Map<DatabaseConnection, ConnectionMetaData> connectionMap = new HashMap<DatabaseConnection, ConnectionMetaData>(); private final Object lock = new Object(); private ConnectionTester tester = null; private String pingStatment; private int openCount = 0; private int releaseCount = 0; private int closeCount = 0; private int maxEverUsed = 0; private int testLoopCount = 0; private long checkConnectionsEveryMillis = CHECK_CONNECTIONS_EVERY_MILLIS; private boolean testBeforeGetFromPool = false; private volatile boolean isOpen = true; public JdbcPooledConnectionSource() { // for spring type wiring } public JdbcPooledConnectionSource(String url) throws SQLException { this(url, null, null, null); } public JdbcPooledConnectionSource(String url, DatabaseType databaseType) throws SQLException { this(url, null, null, databaseType); } public JdbcPooledConnectionSource(String url, String username, String password) throws SQLException { this(url, username, password, null); } public JdbcPooledConnectionSource(String url, String username, String password, DatabaseType databaseType) throws SQLException { super(url, username, password, databaseType); } @Override public void initialize() throws SQLException { super.initialize(); pingStatment = databaseType.getPingStatement(); } @Override public void 
close() throws IOException { if (!initialized) { throw new IOException(getClass().getSimpleName() + " was not initialized properly"); } logger.debug("closing"); synchronized (lock) { // close the outstanding connections in the list for (ConnectionMetaData connMetaData : connFreeList) { closeConnectionQuietly(connMetaData); } connFreeList.clear(); connFreeList = null; // NOTE: We can't close the ones left in the connectionMap because they may still be in use. connectionMap.clear(); isOpen = false; } } @Override public DatabaseConnection getReadOnlyConnection(String tableName) throws SQLException { // set the connection to be read-only in JDBC-land? would need to set read-only or read-write return getReadWriteConnection(tableName); } @Override public DatabaseConnection getReadWriteConnection(String tableName) throws SQLException { checkInitializedSqlException(); DatabaseConnection conn = getSavedConnection(); if (conn != null) { return conn; } synchronized (lock) { while (connFreeList.size() > 0) { // take the first one off of the list ConnectionMetaData connMetaData = getFreeConnection(); if (connMetaData == null) { // need to create a new one } else if (testBeforeGetFromPool && !testConnection(connMetaData)) { // close expired connection closeConnectionQuietly(connMetaData); } else { logger.debug("reusing connection {}", connMetaData); return connMetaData.connection; } } // if none in the free list then make a new one DatabaseConnection connection = makeConnection(logger); openCount++; // add it to our connection map connectionMap.put(connection, new ConnectionMetaData(connection, maxConnectionAgeMillis)); int maxInUse = connectionMap.size(); if (maxInUse > maxEverUsed) { maxEverUsed = maxInUse; } return connection; } } @Override public void releaseConnection(DatabaseConnection connection) throws SQLException { checkInitializedSqlException(); if (isSavedConnection(connection)) { // ignore the release when we are in a transaction return; } /* * If the connection is not close and has auto-commit turned off then we must roll-back any outstanding * statements and set auto-commit back to true. 
*/ boolean isClosed = connection.isClosed(); if (!isClosed && !connection.isAutoCommit()) { connection.rollback(null); connection.setAutoCommit(true); } synchronized (lock) { releaseCount++; if (isClosed) { // it's already closed so just drop it ConnectionMetaData meta = connectionMap.remove(connection); if (meta == null) { logger.debug("dropping already closed unknown connection {}", connection); } else { logger.debug("dropping already closed connection {}", meta); } return; } if (connFreeList == null) { // if we've already closed the pool then just close the connection closeConnection(connection); return; } ConnectionMetaData meta = connectionMap.get(connection); if (meta == null) { logger.error("should have found connection {} in the map", connection); closeConnection(connection); } else { meta.noteUsed(); connFreeList.add(meta); logger.debug("cache released connection {}", meta); if (connFreeList.size() > maxConnectionsFree) { // close the first connection in the queue meta = connFreeList.remove(0); logger.debug("cache too full, closing connection {}", meta); closeConnection(meta.connection); } if (checkConnectionsEveryMillis > 0 && tester == null) { tester = new ConnectionTester(); tester.setName(getClass().getSimpleName() + " connection tester"); tester.setDaemon(true); tester.start(); } } } } @Override public boolean saveSpecialConnection(DatabaseConnection connection) throws SQLException { checkInitializedIllegalStateException(); boolean saved = saveSpecial(connection); if (logger.isLevelEnabled(Level.DEBUG)) { ConnectionMetaData meta = connectionMap.get(connection); logger.debug("saved special connection {}", meta); } return saved; } @Override public void clearSpecialConnection(DatabaseConnection connection) { checkInitializedIllegalStateException(); boolean cleared = clearSpecial(connection, logger); if (logger.isLevelEnabled(Level.DEBUG)) { ConnectionMetaData meta = connectionMap.get(connection); if (cleared) { logger.debug("cleared special connection {}", meta); } else { logger.debug("special connection {} not saved", meta); } } // release should then called after the clear } @Override public boolean isOpen(String tableName) { return isOpen; } @Override public boolean isSingleConnection(String tableName) { return false; } /** * Set the number of connections that can be unused in the available list. */ public void setMaxConnectionsFree(int maxConnectionsFree) { this.maxConnectionsFree = maxConnectionsFree; } /** * Set the number of milliseconds that a connection can stay open before being closed. Set to Long.MAX_VALUE to have * the connections never expire. */ public void setMaxConnectionAgeMillis(long maxConnectionAgeMillis) { this.maxConnectionAgeMillis = maxConnectionAgeMillis; } /** * Return the approximate number of connections opened over the life of the pool. */ public int getOpenCount() { return openCount; } /** * Return the approximate number of connections released over the life of the pool. */ public int getReleaseCount() { return releaseCount; } /** * Return the approximate number of connections closed over the life of the pool. */ public int getCloseCount() { return closeCount; } /** * Return the approximate maximum number of connections in use at one time. */ public int getMaxConnectionsEverUsed() { return maxEverUsed; } /** * Return the number of currently freed connections in the free list. */ public int getCurrentConnectionsFree() { synchronized (lock) { return connFreeList.size(); } } /** * Return the number of current connections that we are tracking. 
*/ public int getCurrentConnectionsManaged() { synchronized (lock) { return connectionMap.size(); } } /** * There is an internal thread which checks each of the database connections as a keep-alive mechanism. This set the * number of milliseconds it sleeps between checks -- default is 30000. To disable the checking thread, set this to * 0 before you start using the connection source. */ public void setCheckConnectionsEveryMillis(long checkConnectionsEveryMillis) { this.checkConnectionsEveryMillis = checkConnectionsEveryMillis; } public void setTestBeforeGet(boolean testBeforeGetFromPool) { this.testBeforeGetFromPool = testBeforeGetFromPool; } /** * Mostly for testing purposes to see how many times our test loop ran. */ public int getTestLoopCount() { return testLoopCount; } /** * This should be inside of synchronized (lock) stanza. */ protected void closeConnection(DatabaseConnection connection) throws SQLException { // this can return null if we are closing the pool ConnectionMetaData meta = connectionMap.remove(connection); IOUtils.closeThrowSqlException(connection, "SQL connection"); logger.debug("closed connection {}", meta); closeCount++; } /** * Must be called inside of synchronized(lock) */ protected void closeConnectionQuietly(ConnectionMetaData connMetaData) { try { // close expired connection closeConnection(connMetaData.connection); } catch (SQLException e) { // we ignore this } } protected boolean testConnection(ConnectionMetaData connMetaData) { try { // issue our ping statement long result = connMetaData.connection.queryForLong(pingStatment); logger.trace("tested connection {}, got {}", connMetaData, result); return true; } catch (Exception e) { logger.debug(e, "testing connection {} threw exception", connMetaData); return false; } } private ConnectionMetaData getFreeConnection() { synchronized (lock) { long now = System.currentTimeMillis(); while (connFreeList.size() > 0) { // take the first one off of the list ConnectionMetaData connMetaData = connFreeList.remove(0); // is it already expired if (connMetaData.isExpired(now)) { // close expired connection closeConnectionQuietly(connMetaData); } else { connMetaData.noteUsed(); return connMetaData; } } } return null; } private void checkInitializedSqlException() throws SQLException { if (!initialized) { throw new SQLException(getClass().getSimpleName() + " was not initialized properly"); } } private void checkInitializedIllegalStateException() { if (!initialized) { throw new IllegalStateException(getClass().getSimpleName() + " was not initialized properly"); } } /** * Class to hold the connection and its meta data. */ protected static class ConnectionMetaData { public final DatabaseConnection connection; private final long expiresMillis; private long lastUsed; public ConnectionMetaData(DatabaseConnection connection, long maxConnectionAgeMillis) { this.connection = connection; long now = System.currentTimeMillis(); if (maxConnectionAgeMillis > Long.MAX_VALUE - now) { this.expiresMillis = Long.MAX_VALUE; } else { this.expiresMillis = now + maxConnectionAgeMillis; } this.lastUsed = now; } public boolean isExpired(long now) { return (expiresMillis <= now); } public long getLastUsed() { return lastUsed; } public void noteUsed() { this.lastUsed = System.currentTimeMillis(); } @Override public String toString() { return "#" + hashCode(); } } /** * Tester thread that checks the connections that we have queued to make sure they are still good. 
*/ private class ConnectionTester extends Thread { // class field to reduce gc private Set<ConnectionMetaData> testedSet = new HashSet<ConnectionMetaData>(); @Override public void run() { while (checkConnectionsEveryMillis > 0) { try { Thread.sleep(checkConnectionsEveryMillis); if (!testConnections()) { return; } } catch (InterruptedException e) { Thread.currentThread().interrupt(); // quit if we've been interrupted return; } } } /** * Test the connections, returning true if we should continue. */ private boolean testConnections() { // clear our tested set testedSet.clear(); long now = System.currentTimeMillis(); ConnectionMetaData connMetaData = null; boolean closeLast = false; while (true) { testLoopCount++; synchronized (lock) { if (closeLast) { if (connMetaData != null) { closeConnectionQuietly(connMetaData); connMetaData = null; } closeLast = false; } if (connFreeList == null) { // we're closed return false; } // add a tested connection back into the free-list if (connMetaData != null) { // we do this so we don't have to double lock in the loop connFreeList.add(connMetaData); } if (connFreeList.isEmpty()) { // nothing to do, return to sleep and go again return true; } connMetaData = connFreeList.get(0); if (testedSet.contains(connMetaData)) { // we are done if we've tested it before on this pass return true; } // otherwise, take the first one off the list connMetaData = connFreeList.remove(0); // see if it is expires so it can be closed immediately if (connMetaData.isExpired(now)) { // close expired connection closeConnectionQuietly(connMetaData); // don't return the connection to the free list connMetaData = null; continue; } } if (testConnection(connMetaData)) { testedSet.add(connMetaData); } else { // we close this inside of the synchronized block closeLast = true; } } } } }
src/main/java/com/j256/ormlite/jdbc/JdbcPooledConnectionSource.java
package com.j256.ormlite.jdbc; import java.io.IOException; import java.sql.SQLException; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import com.j256.ormlite.db.DatabaseType; import com.j256.ormlite.logger.Log.Level; import com.j256.ormlite.logger.Logger; import com.j256.ormlite.logger.LoggerFactory; import com.j256.ormlite.misc.IOUtils; import com.j256.ormlite.support.ConnectionSource; import com.j256.ormlite.support.DatabaseConnection; /** * Implementation of the ConnectionSource interface that supports basic pooled connections. New connections are created * on demand only if there are no dormant connections otherwise released connections will be reused. This class is * reentrant and can handle requests from multiple threads. * * <p> * <b> NOTE: </b> If you are using the Spring type wiring in Java, {@link #initialize} should be called after all of the * set methods. In Spring XML, init-method="initialize" should be used. * </p> * * <p> * <b> NOTE: </b> This class spawns a thread to test the pooled connections that are in the free-list as a keep-alive * mechanism. It will test any dormant connections every so often to see if they are still valid. If this is not the * behavior that you want then call {@link #setCheckConnectionsEveryMillis(long)} with 0 to disable the thread. You can * also call {@link #setTestBeforeGet(boolean)} and set it to true to test the connection before it is handed back to * you. * </p> * * @author graywatson */ public class JdbcPooledConnectionSource extends JdbcConnectionSource implements ConnectionSource { private static Logger logger = LoggerFactory.getLogger(JdbcPooledConnectionSource.class); private final static int DEFAULT_MAX_CONNECTIONS_FREE = 5; // maximum age that a connection can be before being closed private final static int DEFAULT_MAX_CONNECTION_AGE_MILLIS = 60 * 60 * 1000; private final static int CHECK_CONNECTIONS_EVERY_MILLIS = 30 * 1000; private int maxConnectionsFree = DEFAULT_MAX_CONNECTIONS_FREE; private long maxConnectionAgeMillis = DEFAULT_MAX_CONNECTION_AGE_MILLIS; private List<ConnectionMetaData> connFreeList = new ArrayList<ConnectionMetaData>(); protected final Map<DatabaseConnection, ConnectionMetaData> connectionMap = new HashMap<DatabaseConnection, ConnectionMetaData>(); private final Object lock = new Object(); private ConnectionTester tester = null; private String pingStatment; private int openCount = 0; private int releaseCount = 0; private int closeCount = 0; private int maxEverUsed = 0; private int testLoopCount = 0; private long checkConnectionsEveryMillis = CHECK_CONNECTIONS_EVERY_MILLIS; private boolean testBeforeGetFromPool = false; private volatile boolean isOpen = true; public JdbcPooledConnectionSource() { // for spring type wiring } public JdbcPooledConnectionSource(String url) throws SQLException { this(url, null, null, null); } public JdbcPooledConnectionSource(String url, DatabaseType databaseType) throws SQLException { this(url, null, null, databaseType); } public JdbcPooledConnectionSource(String url, String username, String password) throws SQLException { this(url, username, password, null); } public JdbcPooledConnectionSource(String url, String username, String password, DatabaseType databaseType) throws SQLException { super(url, username, password, databaseType); } @Override public void initialize() throws SQLException { super.initialize(); pingStatment = databaseType.getPingStatement(); } @Override public void 
close() throws IOException { if (!initialized) { throw new IOException(getClass().getSimpleName() + " was not initialized properly"); } logger.debug("closing"); synchronized (lock) { // close the outstanding connections in the list for (ConnectionMetaData connMetaData : connFreeList) { closeConnectionQuietly(connMetaData); } connFreeList.clear(); connFreeList = null; // NOTE: We can't close the ones left in the connectionMap because they may still be in use. connectionMap.clear(); isOpen = false; } } @Override public DatabaseConnection getReadOnlyConnection(String tableName) throws SQLException { // set the connection to be read-only in JDBC-land? would need to set read-only or read-write return getReadWriteConnection(tableName); } @Override public DatabaseConnection getReadWriteConnection(String tableName) throws SQLException { checkInitializedSqlException(); DatabaseConnection conn = getSavedConnection(); if (conn != null) { return conn; } synchronized (lock) { while (connFreeList.size() > 0) { // take the first one off of the list ConnectionMetaData connMetaData = getFreeConnection(); if (connMetaData == null) { // need to create a new one } else if (testBeforeGetFromPool && !testConnection(connMetaData)) { // close expired connection closeConnectionQuietly(connMetaData); } else { logger.debug("reusing connection {}", connMetaData); return connMetaData.connection; } } // if none in the free list then make a new one DatabaseConnection connection = makeConnection(logger); openCount++; // add it to our connection map connectionMap.put(connection, new ConnectionMetaData(connection, maxConnectionAgeMillis)); int maxInUse = connectionMap.size(); if (maxInUse > maxEverUsed) { maxEverUsed = maxInUse; } return connection; } } @Override public void releaseConnection(DatabaseConnection connection) throws SQLException { checkInitializedSqlException(); if (isSavedConnection(connection)) { // ignore the release when we are in a transaction return; } /* * If the connection is not close and has auto-commit turned off then we must roll-back any outstanding * statements and set auto-commit back to true. 
*/ boolean isClosed = connection.isClosed(); if (!isClosed && !connection.isAutoCommit()) { connection.rollback(null); connection.setAutoCommit(true); } synchronized (lock) { releaseCount++; if (isClosed) { // it's already closed so just drop it ConnectionMetaData meta = connectionMap.remove(connection); if (meta == null) { logger.debug("dropping already closed unknown connection {}", connection); } else { logger.debug("dropping already closed connection {}", meta); } return; } if (connFreeList == null) { // if we've already closed the pool then just close the connection closeConnection(connection); return; } ConnectionMetaData meta = connectionMap.get(connection); if (meta == null) { logger.error("should have found connection {} in the map", connection); closeConnection(connection); } else { meta.noteUsed(); connFreeList.add(meta); logger.debug("cache released connection {}", meta); if (connFreeList.size() > maxConnectionsFree) { // close the first connection in the queue meta = connFreeList.remove(0); logger.debug("cache too full, closing connection {}", meta); closeConnection(meta.connection); } if (checkConnectionsEveryMillis > 0 && tester == null) { tester = new ConnectionTester(); tester.setName(getClass().getSimpleName() + " connection tester"); tester.setDaemon(true); tester.start(); } } } } @Override public boolean saveSpecialConnection(DatabaseConnection connection) throws SQLException { checkInitializedIllegalStateException(); boolean saved = saveSpecial(connection); if (logger.isLevelEnabled(Level.DEBUG)) { ConnectionMetaData meta = connectionMap.get(connection); logger.debug("saved special connection {}", meta); } return saved; } @Override public void clearSpecialConnection(DatabaseConnection connection) { checkInitializedIllegalStateException(); boolean cleared = clearSpecial(connection, logger); if (logger.isLevelEnabled(Level.DEBUG)) { ConnectionMetaData meta = connectionMap.get(connection); if (cleared) { logger.debug("cleared special connection {}", meta); } else { logger.debug("special connection {} not saved", meta); } } // release should then called after the clear } @Override public boolean isOpen(String tableName) { return isOpen; } @Override public boolean isSingleConnection(String tableName) { return false; } /** * Set the number of connections that can be unused in the available list. */ public void setMaxConnectionsFree(int maxConnectionsFree) { this.maxConnectionsFree = maxConnectionsFree; } /** * Set the number of milliseconds that a connection can stay open before being closed. Set to Long.MAX_VALUE to have * the connections never expire. */ public void setMaxConnectionAgeMillis(long maxConnectionAgeMillis) { this.maxConnectionAgeMillis = maxConnectionAgeMillis; } /** * Return the approximate number of connections opened over the life of the pool. */ public int getOpenCount() { return openCount; } /** * Return the approximate number of connections released over the life of the pool. */ public int getReleaseCount() { return releaseCount; } /** * Return the approximate number of connections closed over the life of the pool. */ public int getCloseCount() { return closeCount; } /** * Return the approximate maximum number of connections in use at one time. */ public int getMaxConnectionsEverUsed() { return maxEverUsed; } /** * Return the number of currently freed connections in the free list. */ public int getCurrentConnectionsFree() { synchronized (lock) { return connFreeList.size(); } } /** * Return the number of current connections that we are tracking. 
*/ public int getCurrentConnectionsManaged() { synchronized (lock) { return connectionMap.size(); } } /** * There is an internal thread which checks each of the database connections as a keep-alive mechanism. This set the * number of milliseconds it sleeps between checks -- default is 30000. To disable the checking thread, set this to * 0 before you start using the connection source. */ public void setCheckConnectionsEveryMillis(long checkConnectionsEveryMillis) { this.checkConnectionsEveryMillis = checkConnectionsEveryMillis; } public void setTestBeforeGet(boolean testBeforeGetFromPool) { this.testBeforeGetFromPool = testBeforeGetFromPool; } /** * Mostly for testing purposes to see how many times our test loop ran. */ public int getTestLoopCount() { return testLoopCount; } /** * This should be inside of synchronized (lock) stanza. */ protected void closeConnection(DatabaseConnection connection) throws SQLException { // this can return null if we are closing the pool ConnectionMetaData meta = connectionMap.remove(connection); IOUtils.closeThrowSqlException(connection, "SQL connection"); logger.debug("closed connection {}", meta); closeCount++; } /** * Must be called inside of synchronized(lock) */ protected void closeConnectionQuietly(ConnectionMetaData connMetaData) { try { // close expired connection closeConnection(connMetaData.connection); } catch (SQLException e) { // we ignore this } } protected boolean testConnection(ConnectionMetaData connMetaData) { try { // issue our ping statement long result = connMetaData.connection.queryForLong(pingStatment); logger.trace("tested connection {}, got {}", connMetaData, result); return true; } catch (Exception e) { logger.debug(e, "testing connection {} threw exception: {}", connMetaData, e); return false; } } private ConnectionMetaData getFreeConnection() { synchronized (lock) { long now = System.currentTimeMillis(); while (connFreeList.size() > 0) { // take the first one off of the list ConnectionMetaData connMetaData = connFreeList.remove(0); // is it already expired if (connMetaData.isExpired(now)) { // close expired connection closeConnectionQuietly(connMetaData); } else { connMetaData.noteUsed(); return connMetaData; } } } return null; } private void checkInitializedSqlException() throws SQLException { if (!initialized) { throw new SQLException(getClass().getSimpleName() + " was not initialized properly"); } } private void checkInitializedIllegalStateException() { if (!initialized) { throw new IllegalStateException(getClass().getSimpleName() + " was not initialized properly"); } } /** * Class to hold the connection and its meta data. */ protected static class ConnectionMetaData { public final DatabaseConnection connection; private final long expiresMillis; private long lastUsed; public ConnectionMetaData(DatabaseConnection connection, long maxConnectionAgeMillis) { this.connection = connection; long now = System.currentTimeMillis(); if (maxConnectionAgeMillis > Long.MAX_VALUE - now) { this.expiresMillis = Long.MAX_VALUE; } else { this.expiresMillis = now + maxConnectionAgeMillis; } this.lastUsed = now; } public boolean isExpired(long now) { return (expiresMillis <= now); } public long getLastUsed() { return lastUsed; } public void noteUsed() { this.lastUsed = System.currentTimeMillis(); } @Override public String toString() { return "#" + hashCode(); } } /** * Tester thread that checks the connections that we have queued to make sure they are still good. 
*/ private class ConnectionTester extends Thread { // class field to reduce gc private Set<ConnectionMetaData> testedSet = new HashSet<ConnectionMetaData>(); @Override public void run() { while (checkConnectionsEveryMillis > 0) { try { Thread.sleep(checkConnectionsEveryMillis); if (!testConnections()) { return; } } catch (InterruptedException e) { Thread.currentThread().interrupt(); // quit if we've been interrupted return; } } } /** * Test the connections, returning true if we should continue. */ private boolean testConnections() { // clear our tested set testedSet.clear(); long now = System.currentTimeMillis(); ConnectionMetaData connMetaData = null; boolean closeLast = false; while (true) { testLoopCount++; synchronized (lock) { if (closeLast) { if (connMetaData != null) { closeConnectionQuietly(connMetaData); connMetaData = null; } closeLast = false; } if (connFreeList == null) { // we're closed return false; } // add a tested connection back into the free-list if (connMetaData != null) { // we do this so we don't have to double lock in the loop connFreeList.add(connMetaData); } if (connFreeList.isEmpty()) { // nothing to do, return to sleep and go again return true; } connMetaData = connFreeList.get(0); if (testedSet.contains(connMetaData)) { // we are done if we've tested it before on this pass return true; } // otherwise, take the first one off the list connMetaData = connFreeList.remove(0); // see if it is expires so it can be closed immediately if (connMetaData.isExpired(now)) { // close expired connection closeConnectionQuietly(connMetaData); // don't return the connection to the free list connMetaData = null; continue; } } if (testConnection(connMetaData)) { testedSet.add(connMetaData); } else { // we close this inside of the synchronized block closeLast = true; } } } } }
Fixed exception logging.
src/main/java/com/j256/ormlite/jdbc/JdbcPooledConnectionSource.java
Fixed exception logging.
<ide><path>rc/main/java/com/j256/ormlite/jdbc/JdbcPooledConnectionSource.java <ide> logger.trace("tested connection {}, got {}", connMetaData, result); <ide> return true; <ide> } catch (Exception e) { <del> logger.debug(e, "testing connection {} threw exception: {}", connMetaData, e); <add> logger.debug(e, "testing connection {} threw exception", connMetaData); <ide> return false; <ide> } <ide> }
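For context, the pool-tuning methods documented in the class javadoc of this record (keep-alive tester thread, test-before-get, free-list size, connection age) could be exercised roughly as follows. This is a hypothetical sketch, not part of the record: the JDBC URL is made up, and only constructors, setters, and accessors visible in the class above are used.

package example; // hypothetical usage sketch, not part of the record

import com.j256.ormlite.jdbc.JdbcPooledConnectionSource;
import com.j256.ormlite.support.DatabaseConnection;

public class PooledSourceSketch {

    public static void main(String[] args) throws Exception {
        // per the class javadoc, explicit initialize() is only needed for Spring-style wiring;
        // the URL constructor is used here (the H2 in-memory URL is an assumption)
        JdbcPooledConnectionSource pool =
                new JdbcPooledConnectionSource("jdbc:h2:mem:sketch");
        pool.setMaxConnectionsFree(5);                 // dormant connections kept in the free list
        pool.setMaxConnectionAgeMillis(5 * 60 * 1000); // recycle connections older than five minutes
        pool.setCheckConnectionsEveryMillis(0);        // disable the keep-alive tester thread
        pool.setTestBeforeGet(true);                   // ping a pooled connection before handing it out
        DatabaseConnection conn = pool.getReadWriteConnection(null);
        try {
            // ... run statements through conn ...
        } finally {
            pool.releaseConnection(conn);
        }
        pool.close();
    }
}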
JavaScript
mit
49c3e0a201d333d4b76863768860f4dc98a568b2
0
zeit/next.js,JeromeFitz/next.js,azukaru/next.js,azukaru/next.js,azukaru/next.js,flybayer/next.js,flybayer/next.js,flybayer/next.js,flybayer/next.js,zeit/next.js,JeromeFitz/next.js,JeromeFitz/next.js,JeromeFitz/next.js,zeit/next.js,azukaru/next.js
/* eslint-env jest */ import { join } from 'path' import { renderViaHTTP, findPort, launchApp, killApp } from 'next-test-utils' // test suites import hmr from './hmr' import errorRecovery from './error-recovery' import dynamic from './dynamic' import processEnv from './process-env' import publicFolder from './public-folder' import security from './security' import developmentLogs from './development-logs' const context = {} jest.setTimeout(1000 * 60 * 5) describe('Basic Features', () => { beforeAll(async () => { context.appPort = await findPort() context.server = await launchApp(join(__dirname, '../'), context.appPort, { env: { __NEXT_TEST_WITH_DEVTOOL: 1 }, }) }) afterAll(() => killApp(context.server)) dynamic(context, (p, q) => renderViaHTTP(context.appPort, p, q)) hmr(context, (p, q) => renderViaHTTP(context.appPort, p, q)) errorRecovery(context, (p, q) => renderViaHTTP(context.appPort, p, q)) processEnv(context) publicFolder(context) security(context) developmentLogs(context) })
test/integration/basic/test/index.test.js
/* eslint-env jest */ import { join } from 'path' import { renderViaHTTP, findPort, launchApp, killApp } from 'next-test-utils' // test suits import hmr from './hmr' import errorRecovery from './error-recovery' import dynamic from './dynamic' import processEnv from './process-env' import publicFolder from './public-folder' import security from './security' import developmentLogs from './development-logs' const context = {} jest.setTimeout(1000 * 60 * 5) describe('Basic Features', () => { beforeAll(async () => { context.appPort = await findPort() context.server = await launchApp(join(__dirname, '../'), context.appPort, { env: { __NEXT_TEST_WITH_DEVTOOL: 1 }, }) }) afterAll(() => killApp(context.server)) dynamic(context, (p, q) => renderViaHTTP(context.appPort, p, q)) hmr(context, (p, q) => renderViaHTTP(context.appPort, p, q)) errorRecovery(context, (p, q) => renderViaHTTP(context.appPort, p, q)) processEnv(context) publicFolder(context) security(context) developmentLogs(context) })
fix(tests): fixes typo in basic integration test (#28158)

## Bug

- [ ] Related issues linked using `fixes #number`
- [ ] Integration tests added
- [ ] Errors have helpful link attached, see `contributing.md`

## Feature

- [ ] Implements an existing feature request or RFC. Make sure the feature request has been accepted for implementation before opening a PR.
- [ ] Related issues linked using `fixes #number`
- [ ] Integration tests added
- [ ] Documentation added
- [ ] Telemetry added. In case of a feature if it's used or not.
- [ ] Errors have helpful link attached, see `contributing.md`

## Documentation / Examples

- [x] Make sure the linting passes

While reading the code I noticed this typo.
test/integration/basic/test/index.test.js
fix(tests): fixes typo in basic integration test (#28158)
<ide><path>est/integration/basic/test/index.test.js <ide> import { join } from 'path' <ide> import { renderViaHTTP, findPort, launchApp, killApp } from 'next-test-utils' <ide> <del>// test suits <add>// test suites <ide> import hmr from './hmr' <ide> import errorRecovery from './error-recovery' <ide> import dynamic from './dynamic'
Java
apache-2.0
51a429ebd9bab4881829d6e83e31ef068ec2caae
0
iBotPeaches/Apktool,Benjamin-Dobell/Apktool,kesuki/Apktool,berkus/android-apktool,blaquee/Apktool,KuaiFaMaster/Apktool,harish123400/Apktool,yujokang/Apktool,kuter007/android-apktool,sawrus/Apktool,kesuki/Apktool,yunemr/Apktool,lovely3x/Apktool,guiyu/android-apktool,HackerTool/Apktool,phhusson/Apktool,lczgywzyy/Apktool,jasonzhong/Apktool,draekko/Apktool,androidmchen/Apktool,Yaeger/Apktool,MiCode/brut.apktool,zhic5352/Apktool,370829592/android-apktool,pandazheng/Apktool,PiR43/Apktool,digshock/android-apktool,lovely3x/Apktool,yujokang/Apktool,digshock/android-apktool,blaquee/Apktool,nitinverma/Apktool,youleyu/android-apktool,berkus/android-apktool,tmpgit/Apktool,valery-barysok/Apktool,desword/android-apktool,valery-barysok/Apktool,draekko/Apktool,alipov/Apktool,fabiand93/Apktool,androidmchen/Apktool,fromsatellite/Apktool,ccgreen13/Apktool,dankoman30/Apktool,bingshi/android-apktool,simtel12/Apktool,Benjamin-Dobell/Apktool,simtel12/Apktool,pwelyn/Apktool,admin-zhx/Apktool,KuaiFaMaster/Apktool,akhirasip/Apktool,jasonzhong/Apktool,chenrui2014/Apktool,asolfre/android-apktool,iBotPeaches/Apktool,Klozz/Apktool,CheungSKei/Apktool,yunemr/Apktool,nitinverma/Apktool,kuter007/android-apktool,desword/android-apktool,alipov/Apktool,hongnguyenpro/Apktool,jianglibo/Apktool,fromsatellite/Apktool,admin-zhx/Apktool,youleyu/android-apktool,lczgywzyy/Apktool,jianglibo/Apktool,zhakui/Apktool,hongnguyenpro/Apktool,zdzhjx/android-apktool,zhanwei/android-apktool,akhirasip/Apktool,sawrus/Apktool,virustotalop/Apktool,zhakui/Apktool,Yaeger/Apktool,Klozz/Apktool,chenrui2014/Apktool,dankoman30/Apktool,bingshi/android-apktool,guiyu/android-apktool,lnln1111/android-apktool,ccgreen13/Apktool,HackerTool/Apktool,kaneawk/Apktool,rover12421/Apktool,kaneawk/Apktool,PiR43/Apktool,lnln1111/android-apktool,zdzhjx/android-apktool,pwelyn/Apktool,harish123400/Apktool,phhusson/Apktool,asolfre/android-apktool,iAmGhost/brut.apktool,zhic5352/Apktool,fabiand93/Apktool,virustotalop/Apktool,rover12421/Apktool,tmpgit/Apktool,zhanwei/android-apktool,CheungSKei/Apktool,pandazheng/Apktool,370829592/android-apktool
/* * Copyright 2010 Ryszard Wiśniewski <[email protected]>. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * under the License. */ package brut.androlib.res; import brut.androlib.AndrolibException; import brut.androlib.err.CantFindFrameworkResException; import brut.androlib.res.data.*; import brut.androlib.res.data.value.ResXmlSerializable; import brut.androlib.res.decoder.*; import brut.androlib.res.util.ExtFile; import brut.androlib.res.util.ExtMXSerializer; import brut.common.BrutException; import brut.directory.*; import brut.util.*; import java.io.*; import java.util.*; import java.util.logging.Logger; import org.apache.commons.io.IOUtils; import org.xmlpull.v1.XmlSerializer; /** * @author Ryszard Wiśniewski <[email protected]> */ final public class AndrolibResources { public ResTable getResTable(ExtFile apkFile) throws AndrolibException { ResTable resTable = new ResTable(this); loadMainPkg(resTable, apkFile); return resTable; } public ResPackage loadMainPkg(ResTable resTable, ExtFile apkFile) throws AndrolibException { LOGGER.info("Loading resource table..."); ResPackage[] pkgs = getResPackagesFromApk(apkFile, resTable); ResPackage pkg = null; switch (pkgs.length) { case 1: pkg = pkgs[0]; break; case 2: if (pkgs[0].getName().equals("android")) { LOGGER.warning("Skipping \"android\" package group"); pkg = pkgs[1]; } break; } if (pkg == null) { throw new AndrolibException( "Arsc files with zero or multiple packages"); } resTable.addPackage(pkg, true); return pkg; } public ResPackage loadFrameworkPkg(ResTable resTable, int id, String frameTag) throws AndrolibException { File apk = getFrameworkApk(id, frameTag); LOGGER.info("Loading resource table from file: " + apk); ResPackage[] pkgs = getResPackagesFromApk(new ExtFile(apk), resTable); if (pkgs.length != 1) { throw new AndrolibException( "Arsc files with zero or multiple packages"); } ResPackage pkg = pkgs[0]; if (pkg.getId() != id) { throw new AndrolibException("Expected pkg of id: " + String.valueOf(id) + ", got: " + pkg.getId()); } resTable.addPackage(pkg, false); return pkg; } public void decode(ResTable resTable, ExtFile apkFile, File outDir) throws AndrolibException { Duo<ResFileDecoder, ResAttrDecoder> duo = getResFileDecoder(); ResFileDecoder fileDecoder = duo.m1; ResAttrDecoder attrDecoder = duo.m2; attrDecoder.setCurrentPackage( resTable.listMainPackages().iterator().next()); Directory in, out, out9Patch; try { in = apkFile.getDirectory(); out = new FileDirectory(outDir); fileDecoder.decode( in, "AndroidManifest.xml", out, "AndroidManifest.xml", "xml"); out9Patch = out.createDir("9patch/res"); in = in.getDir("res"); out = out.createDir("res"); } catch (DirectoryException ex) { throw new AndrolibException(ex); } ExtMXSerializer xmlSerializer = getResXmlSerializer(); for (ResPackage pkg : resTable.listMainPackages()) { attrDecoder.setCurrentPackage(pkg); for (ResResource res : pkg.listFiles()) { fileDecoder.decode(res, in, out, out9Patch); } for (ResValuesFile valuesFile : pkg.listValuesFiles()) { generateValuesFile(valuesFile, out, 
xmlSerializer); } generatePublicXml(pkg, out, xmlSerializer); } } public void aaptPackage(File apkFile, File manifest, File resDir, File rawDir, File assetDir, File[] include, boolean update, boolean framework) throws AndrolibException { List<String> cmd = new ArrayList<String>(); cmd.add("aapt"); cmd.add("p"); if (update) { cmd.add("-u"); } cmd.add("-F"); cmd.add(apkFile.getAbsolutePath()); if (framework) { cmd.add("-x"); cmd.add("-0"); cmd.add("arsc"); } if (include != null) { for (File file : include) { cmd.add("-I"); cmd.add(file.getPath()); } } if (resDir != null) { cmd.add("-S"); cmd.add(resDir.getAbsolutePath()); } if (manifest != null) { cmd.add("-M"); cmd.add(manifest.getAbsolutePath()); } if (assetDir != null) { cmd.add("-A"); cmd.add(assetDir.getAbsolutePath()); } if (rawDir != null) { cmd.add(rawDir.getAbsolutePath()); } try { OS.exec(cmd.toArray(new String[0])); } catch (BrutException ex) { throw new AndrolibException(ex); } } public boolean detectWhetherAppIsFramework(File appDir) throws AndrolibException { File publicXml = new File(appDir, "res/values/public.xml"); if (! publicXml.exists()) { return false; } Iterator<String> it; try { it = IOUtils.lineIterator( new FileReader(new File(appDir, "res/values/public.xml"))); } catch (FileNotFoundException ex) { throw new AndrolibException( "Could not detect whether app is framework one", ex); } it.next(); it.next(); return it.next().contains("0x01"); } public void tagSmaliResIDs(ResTable resTable, File smaliDir) throws AndrolibException { new ResSmaliUpdater().tagResIDs(resTable, smaliDir); } public void updateSmaliResIDs(ResTable resTable, File smaliDir) throws AndrolibException { new ResSmaliUpdater().updateResIDs(resTable, smaliDir); } public Duo<ResFileDecoder, ResAttrDecoder> getResFileDecoder() { ResStreamDecoderContainer decoders = new ResStreamDecoderContainer(); decoders.setDecoder("raw", new ResRawStreamDecoder()); ResAttrDecoder attrDecoder = new ResAttrDecoder(); AXmlResourceParser axmlParser = new AXmlResourceParser(); axmlParser.setAttrDecoder(attrDecoder); decoders.setDecoder("xml", new XmlPullStreamDecoder(axmlParser, getResXmlSerializer())); return new Duo<ResFileDecoder, ResAttrDecoder>( new ResFileDecoder(decoders), attrDecoder); } public ExtMXSerializer getResXmlSerializer() { ExtMXSerializer serial = new ExtMXSerializer(); serial.setProperty(serial.PROPERTY_SERIALIZER_INDENTATION, " "); serial.setProperty(serial.PROPERTY_SERIALIZER_LINE_SEPARATOR, System.getProperty("line.separator")); serial.setProperty(ExtMXSerializer.PROPERTY_DEFAULT_ENCODING, "UTF-8"); return serial; } private void generateValuesFile(ResValuesFile valuesFile, Directory out, XmlSerializer serial) throws AndrolibException { try { OutputStream outStream = out.getFileOutput(valuesFile.getPath()); serial.setOutput((outStream), null); serial.startDocument(null, null); serial.startTag(null, "resources"); for (ResResource res : valuesFile.listResources()) { if (valuesFile.isSynthesized(res)) { continue; } ((ResXmlSerializable) res.getValue()) .serializeToXml(serial, res); } serial.endTag(null, "resources"); serial.endDocument(); serial.flush(); outStream.close(); } catch (IOException ex) { throw new AndrolibException( "Could not generate: " + valuesFile.getPath(), ex); } catch (DirectoryException ex) { throw new AndrolibException( "Could not generate: " + valuesFile.getPath(), ex); } } private void generatePublicXml(ResPackage pkg, Directory out, XmlSerializer serial) throws AndrolibException { try { OutputStream outStream = 
out.getFileOutput("values/public.xml"); serial.setOutput(outStream, null); serial.startDocument(null, null); serial.startTag(null, "resources"); for (ResResSpec spec : pkg.listResSpecs()) { serial.startTag(null, "public"); serial.attribute(null, "type", spec.getType().getName()); serial.attribute(null, "name", spec.getName()); serial.attribute(null, "id", String.format( "0x%08x", spec.getId().id)); serial.endTag(null, "public"); } serial.endTag(null, "resources"); serial.endDocument(); serial.flush(); outStream.close(); } catch (IOException ex) { throw new AndrolibException( "Could not generate public.xml file", ex); } catch (DirectoryException ex) { throw new AndrolibException( "Could not generate public.xml file", ex); } } private ResPackage[] getResPackagesFromApk(ExtFile apkFile, ResTable resTable) throws AndrolibException { try { return ARSCDecoder.decode( apkFile.getDirectory().getFileInput("resources.arsc"), resTable); } catch (DirectoryException ex) { throw new AndrolibException( "Could not load resources.arsc from file: " + apkFile, ex); } } private File getFrameworkApk(int id, String frameTag) throws AndrolibException { File dir = getFrameworkDir(); File apk; if (frameTag != null) { apk = new File(dir, String.valueOf(id) + '-' + frameTag + ".apk"); if (apk.exists()) { return apk; } } apk = new File(dir, String.valueOf(id) + ".apk"); if (apk.exists()) { return apk; } if (id == 1) { InputStream in = null; OutputStream out = null; try { in = AndrolibResources.class.getResourceAsStream( "/brut/androlib/android-framework.jar"); out = new FileOutputStream(apk); IOUtils.copy(in, out); return apk; } catch (IOException ex) { throw new AndrolibException(ex); } finally { if (in != null) { try { in.close(); } catch (IOException ex) {} } if (out != null) { try { out.close(); } catch (IOException ex) {} } } } throw new CantFindFrameworkResException(id); } private File getFrameworkDir() throws AndrolibException { File dir = new File(System.getProperty("user.home") + File.separatorChar + "apktool" + File.separatorChar + "framework"); if (! dir.exists()) { if (! dir.mkdirs()) { throw new AndrolibException("Can't create directory: " + dir); } } return dir; } public File getAndroidResourcesFile() throws AndrolibException { try { return Jar.getResourceAsFile("/brut/androlib/android-framework.jar"); } catch (BrutException ex) { throw new AndrolibException(ex); } } public File getHtcResourcesFile() throws AndrolibException { try { return Jar.getResourceAsFile( "/brut/androlib/com.htc.resources.apk"); } catch (BrutException ex) { throw new AndrolibException(ex); } } public static String escapeForResXml(String value) { if (value.isEmpty()) { return value; } StringBuilder out = new StringBuilder(value.length() + 10); char[] chars = value.toCharArray(); switch (chars[0]) { case '@': case '#': case '?': out.append('\\'); } boolean space = true; for (int i = 0; i < chars.length; i++) { char c = chars[i]; if (c == ' ') { if (space) { out.append("\\u0020"); } else { out.append(c); space = true; } continue; } space = false; switch (c) { case '\\': case '\'': case '"': out.append('\\'); break; case '\n': out.append("\\n"); continue; } out.append(c); } if (space && out.charAt(out.length() - 1) == ' ') { out.deleteCharAt(out.length() - 1); out.append("\\u0020"); } return out.toString(); } private final static Logger LOGGER = Logger.getLogger(AndrolibResources.class.getName()); }
src/brut/androlib/res/AndrolibResources.java
/* * Copyright 2010 Ryszard Wiśniewski <[email protected]>. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * under the License. */ package brut.androlib.res; import brut.androlib.AndrolibException; import brut.androlib.err.CantFindFrameworkResException; import brut.androlib.res.data.*; import brut.androlib.res.data.value.ResXmlSerializable; import brut.androlib.res.decoder.*; import brut.androlib.res.util.ExtFile; import brut.androlib.res.util.ExtMXSerializer; import brut.common.BrutException; import brut.directory.*; import brut.util.*; import java.io.*; import java.util.*; import java.util.logging.Logger; import org.apache.commons.io.IOUtils; import org.xmlpull.v1.XmlSerializer; /** * @author Ryszard Wiśniewski <[email protected]> */ final public class AndrolibResources { public ResTable getResTable(ExtFile apkFile) throws AndrolibException { ResTable resTable = new ResTable(this); loadMainPkg(resTable, apkFile); return resTable; } public ResPackage loadMainPkg(ResTable resTable, ExtFile apkFile) throws AndrolibException { LOGGER.info("Loading resource table..."); ResPackage[] pkgs = getResPackagesFromApk(apkFile, resTable); ResPackage pkg = null; switch (pkgs.length) { case 1: pkg = pkgs[0]; break; case 2: if (pkgs[0].getName().equals("android")) { LOGGER.warning("Skipping \"android\" package group"); pkg = pkgs[1]; } break; } if (pkg == null) { throw new AndrolibException( "Arsc files with zero or multiple packages"); } resTable.addPackage(pkg, true); return pkg; } public ResPackage loadFrameworkPkg(ResTable resTable, int id, String frameTag) throws AndrolibException { File apk = getFrameworkApk(id, frameTag); LOGGER.info("Loading resource table from file: " + apk); ResPackage[] pkgs = getResPackagesFromApk(new ExtFile(apk), resTable); if (pkgs.length != 1) { throw new AndrolibException( "Arsc files with zero or multiple packages"); } ResPackage pkg = pkgs[0]; if (pkg.getId() != id) { throw new AndrolibException("Expected pkg of id: " + String.valueOf(id) + ", got: " + pkg.getId()); } resTable.addPackage(pkg, false); return pkg; } public void decode(ResTable resTable, ExtFile apkFile, File outDir) throws AndrolibException { Duo<ResFileDecoder, ResAttrDecoder> duo = getResFileDecoder(); ResFileDecoder fileDecoder = duo.m1; ResAttrDecoder attrDecoder = duo.m2; attrDecoder.setCurrentPackage( resTable.listMainPackages().iterator().next()); Directory in, out, out9Patch; try { in = apkFile.getDirectory(); out = new FileDirectory(outDir); fileDecoder.decode( in, "AndroidManifest.xml", out, "AndroidManifest.xml", "xml"); out9Patch = out.createDir("9patch/res"); in = in.getDir("res"); out = out.createDir("res"); } catch (DirectoryException ex) { throw new AndrolibException(ex); } ExtMXSerializer xmlSerializer = getResXmlSerializer(); for (ResPackage pkg : resTable.listMainPackages()) { attrDecoder.setCurrentPackage(pkg); for (ResResource res : pkg.listFiles()) { fileDecoder.decode(res, in, out, out9Patch); } for (ResValuesFile valuesFile : pkg.listValuesFiles()) { generateValuesFile(valuesFile, out, 
xmlSerializer); } generatePublicXml(pkg, out, xmlSerializer); } } public void aaptPackage(File apkFile, File manifest, File resDir, File rawDir, File assetDir, File[] include, boolean update, boolean framework) throws AndrolibException { List<String> cmd = new ArrayList<String>(); cmd.add("aapt"); cmd.add("p"); if (update) { cmd.add("-u"); } cmd.add("-F"); cmd.add(apkFile.getAbsolutePath()); if (framework) { cmd.add("-x"); cmd.add("-0"); cmd.add("arsc"); } if (include != null) { for (File file : include) { cmd.add("-I"); cmd.add(file.getPath()); } } if (resDir != null) { cmd.add("-S"); cmd.add(resDir.getAbsolutePath()); } if (manifest != null) { cmd.add("-M"); cmd.add(manifest.getAbsolutePath()); } if (assetDir != null) { cmd.add("-A"); cmd.add(assetDir.getAbsolutePath()); } if (rawDir != null) { cmd.add(rawDir.getAbsolutePath()); } try { OS.exec(cmd.toArray(new String[0])); } catch (BrutException ex) { throw new AndrolibException(ex); } } public boolean detectWhetherAppIsFramework(File appDir) throws AndrolibException { File publicXml = new File(appDir, "res/values/public.xml"); if (! publicXml.exists()) { return false; } Iterator<String> it; try { it = IOUtils.lineIterator( new FileReader(new File(appDir, "res/values/public.xml"))); } catch (FileNotFoundException ex) { throw new AndrolibException( "Could not detect whether app is framework one", ex); } it.next(); it.next(); return it.next().contains("0x01"); } public void tagSmaliResIDs(ResTable resTable, File smaliDir) throws AndrolibException { new ResSmaliUpdater().tagResIDs(resTable, smaliDir); } public void updateSmaliResIDs(ResTable resTable, File smaliDir) throws AndrolibException { new ResSmaliUpdater().updateResIDs(resTable, smaliDir); } public Duo<ResFileDecoder, ResAttrDecoder> getResFileDecoder() { ResStreamDecoderContainer decoders = new ResStreamDecoderContainer(); decoders.setDecoder("raw", new ResRawStreamDecoder()); ResAttrDecoder attrDecoder = new ResAttrDecoder(); AXmlResourceParser axmlParser = new AXmlResourceParser(); axmlParser.setAttrDecoder(attrDecoder); decoders.setDecoder("xml", new XmlPullStreamDecoder(axmlParser, getResXmlSerializer())); return new Duo<ResFileDecoder, ResAttrDecoder>( new ResFileDecoder(decoders), attrDecoder); } public ExtMXSerializer getResXmlSerializer() { ExtMXSerializer serial = new ExtMXSerializer(); serial.setProperty(serial.PROPERTY_SERIALIZER_INDENTATION, " "); serial.setProperty(serial.PROPERTY_SERIALIZER_LINE_SEPARATOR, System.getProperty("line.separator")); serial.setProperty(ExtMXSerializer.PROPERTY_DEFAULT_ENCODING, "UTF-8"); return serial; } private void generateValuesFile(ResValuesFile valuesFile, Directory out, XmlSerializer serial) throws AndrolibException { try { OutputStream outStream = out.getFileOutput(valuesFile.getPath()); serial.setOutput((outStream), null); serial.startDocument(null, null); serial.startTag(null, "resources"); for (ResResource res : valuesFile.listResources()) { if (valuesFile.isSynthesized(res)) { continue; } ((ResXmlSerializable) res.getValue()) .serializeToXml(serial, res); } serial.endTag(null, "resources"); serial.endDocument(); serial.flush(); outStream.close(); } catch (IOException ex) { throw new AndrolibException( "Could not generate: " + valuesFile.getPath(), ex); } catch (DirectoryException ex) { throw new AndrolibException( "Could not generate: " + valuesFile.getPath(), ex); } } private void generatePublicXml(ResPackage pkg, Directory out, XmlSerializer serial) throws AndrolibException { try { OutputStream outStream = 
out.getFileOutput("values/public.xml"); serial.setOutput(outStream, null); serial.startDocument(null, null); serial.startTag(null, "resources"); for (ResResSpec spec : pkg.listResSpecs()) { serial.startTag(null, "public"); serial.attribute(null, "type", spec.getType().getName()); serial.attribute(null, "name", spec.getName()); serial.attribute(null, "id", String.format( "0x%08x", spec.getId().id)); serial.endTag(null, "public"); } serial.endTag(null, "resources"); serial.endDocument(); serial.flush(); outStream.close(); } catch (IOException ex) { throw new AndrolibException( "Could not generate public.xml file", ex); } catch (DirectoryException ex) { throw new AndrolibException( "Could not generate public.xml file", ex); } } private ResPackage[] getResPackagesFromApk(ExtFile apkFile, ResTable resTable) throws AndrolibException { try { return ARSCDecoder.decode( apkFile.getDirectory().getFileInput("resources.arsc"), resTable); } catch (DirectoryException ex) { throw new AndrolibException( "Could not load resources.arsc from file: " + apkFile, ex); } } private File getFrameworkApk(int id, String frameTag) throws AndrolibException { File dir = getFrameworkDir(); File apk = new File(dir, String.valueOf(id) + '-' + frameTag + ".apk"); if (apk.exists()) { return apk; } apk = new File(dir, String.valueOf(id) + ".apk"); if (apk.exists()) { return apk; } if (id == 1) { InputStream in = null; OutputStream out = null; try { in = AndrolibResources.class.getResourceAsStream( "/brut/androlib/android-framework.jar"); out = new FileOutputStream(apk); IOUtils.copy(in, out); return apk; } catch (IOException ex) { throw new AndrolibException(ex); } finally { if (in != null) { try { in.close(); } catch (IOException ex) {} } if (out != null) { try { out.close(); } catch (IOException ex) {} } } } throw new CantFindFrameworkResException(id); } private File getFrameworkDir() throws AndrolibException { File dir = new File(System.getProperty("user.home") + File.separatorChar + "apktool" + File.separatorChar + "framework"); if (! dir.exists()) { if (! dir.mkdirs()) { throw new AndrolibException("Can't create directory: " + dir); } } return dir; } public File getAndroidResourcesFile() throws AndrolibException { try { return Jar.getResourceAsFile("/brut/androlib/android-framework.jar"); } catch (BrutException ex) { throw new AndrolibException(ex); } } public File getHtcResourcesFile() throws AndrolibException { try { return Jar.getResourceAsFile( "/brut/androlib/com.htc.resources.apk"); } catch (BrutException ex) { throw new AndrolibException(ex); } } public static String escapeForResXml(String value) { if (value.isEmpty()) { return value; } StringBuilder out = new StringBuilder(value.length() + 10); char[] chars = value.toCharArray(); switch (chars[0]) { case '@': case '#': case '?': out.append('\\'); } boolean space = true; for (int i = 0; i < chars.length; i++) { char c = chars[i]; if (c == ' ') { if (space) { out.append("\\u0020"); } else { out.append(c); space = true; } continue; } space = false; switch (c) { case '\\': case '\'': case '"': out.append('\\'); break; case '\n': out.append("\\n"); continue; } out.append(c); } if (space && out.charAt(out.length() - 1) == ' ') { out.deleteCharAt(out.length() - 1); out.append("\\u0020"); } return out.toString(); } private final static Logger LOGGER = Logger.getLogger(AndrolibResources.class.getName()); }
AndrolibResources.getFrameworkApk(): fixed a small bug occurring when frameTag is null.
src/brut/androlib/res/AndrolibResources.java
AndrolibResources.getFrameworkApk(): fixed a small bug occurring when frameTag is null.
<ide><path>src/brut/androlib/res/AndrolibResources.java <ide> private File getFrameworkApk(int id, String frameTag) <ide> throws AndrolibException { <ide> File dir = getFrameworkDir(); <del> <del> File apk = new File(dir, String.valueOf(id) + '-' + frameTag + ".apk"); <del> if (apk.exists()) { <del> return apk; <add> File apk; <add> <add> if (frameTag != null) { <add> apk = new File(dir, String.valueOf(id) + '-' + frameTag + ".apk"); <add> if (apk.exists()) { <add> return apk; <add> } <ide> } <ide> <ide> apk = new File(dir, String.valueOf(id) + ".apk");
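The diff above shows the null-guard that this commit adds: the tag-specific "<id>-<frameTag>.apk" lookup now runs only when frameTag is non-null, before falling back to the plain "<id>.apk" name. Below is a minimal, self-contained sketch of that same guard pattern; the class name FrameworkApkLookup and the standalone helper method are hypothetical illustrations, not part of the apktool source.

import java.io.File;

// Hypothetical sketch of the guard added in the diff above: skip the
// "<id>-<frameTag>.apk" lookup entirely when frameTag is null, then try
// the plain "<id>.apk" name, returning null if neither file exists.
public class FrameworkApkLookup {

    static File findFrameworkApk(File dir, int id, String frameTag) {
        if (frameTag != null) {
            File tagged = new File(dir, id + "-" + frameTag + ".apk");
            if (tagged.exists()) {
                return tagged;
            }
        }
        File plain = new File(dir, id + ".apk");
        return plain.exists() ? plain : null;
    }

    public static void main(String[] args) {
        File dir = new File(System.getProperty("user.home"),
                "apktool" + File.separator + "framework");
        // With frameTag == null the tagged lookup is skipped instead of
        // probing for a nonsensical "1-null.apk" file.
        System.out.println(findFrameworkApk(dir, 1, null));
    }
}

In the actual patch the fallback to "<id>.apk" and the framework-extraction logic that follows remain unchanged; only the tagged lookup gains the guard.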
Java
lgpl-2.1
71d0a81f18b1a5e1bd64668b79178cab2441ab7f
0
viktorbahr/jaer,SensorsINI/jaer,viktorbahr/jaer,SensorsINI/jaer,SensorsINI/jaer,viktorbahr/jaer,SensorsINI/jaer,viktorbahr/jaer,viktorbahr/jaer,viktorbahr/jaer,SensorsINI/jaer,SensorsINI/jaer,SensorsINI/jaer,viktorbahr/jaer,SensorsINI/jaer
/* * To change this template, choose Tools | Templates * and open the template in the editor. */ package ch.unizh.ini.jaer.projects.gesture.vlccontrol; import java.beans.PropertyChangeSupport; import java.io.*; import java.io.IOException; import java.nio.CharBuffer; import java.util.logging.Level; import java.util.logging.Logger; import org.apache.commons.net.telnet.*; import org.apache.commons.net.telnet.TelnetClient; /** * Exposes control of VLC media player (videolan.org) from java. VLC must be set up to open a telnet control on localhost:4444. * <p> * For VLC 1.1.5, use the following setup to expose the remote control (rc) interrface for telnet control: * <p> * This setting is in VLC Tools/Preferences/Show settings (All)/Interface/Main interfaces. Select the "Remote Control Interface" and replace "oldrc" with "rc" in the text field. * In VLC Tools/Preferences/Show settings (All)/Interface/Main interfaces/RC/TCP command input, put the string "localhost:4444" in the text field. * * @author Tobi */ public class VLCControl extends TelnetClient implements Runnable, TelnetNotificationHandler { /** VLC should be started with as "vlc --rc-host=localhost:4444" */ public static final int VLC_PORT = 4444; static final Logger log = Logger.getLogger("VLCControl"); private CharBuffer cbuf = CharBuffer.allocate(1024); private static VLCControl staticInstance = null; // used to communicate among instances the active client private PropertyChangeSupport support=new PropertyChangeSupport(this); // listeners get informed by output from VLC strings public VLCControl() { } @Override public void disconnect() throws IOException { sendCommand("quit"); super.disconnect(); } public void connect() throws IOException { staticInstance = this; // used by reader to get input stream try { staticInstance.connect("localhost", VLC_PORT); Thread thread = new Thread(new VLCControl()); // starts the thread to get the text sent back from VLC thread.start(); staticInstance.registerNotifHandler(this); // notifications call back to logger Runtime.getRuntime().addShutdownHook(new Thread() { // shutdown hook here makes sure to disconnect cleanly, as long as we are not terminated @Override public void run() { try { if (isConnected()) { disconnect(); } } catch (IOException ex) { log.warning(ex.toString()); } } }); } catch (IOException e) { log.warning("couldn't connect to VLC - you may need to start VLC with command line \"vlc --rc-host=localhost:4444\""); throw new IOException(e); } } /** Sends a string command. Commands do not need to be terminated with a newline. <p> <pre> +----[ Remote control commands ] | add XYZ . . . . . . . . . . . . . . . . . . . . add XYZ to playlist | enqueue XYZ . . . . . . . . . . . . . . . . . queue XYZ to playlist | playlist . . . . . . . . . . . . . .show items currently in playlist | search [string] . . search for items in playlist (or reset search) | sort key . . . . . . . . . . . . . . . . . . . . . sort the playlist | sd [sd] . . . . . . . . . . . . . show services discovery or toggle | play . . . . . . . . . . . . . . . . . . . . . . . . . . play stream | stop . . . . . . . . . . . . . . . . . . . . . . . . . . stop stream | next . . . . . . . . . . . . . . . . . . . . . . next playlist item | prev . . . . . . . . . . . . . . . . . . . . previous playlist item | goto . . . . . . . . . . . . . . . . . . . . . . goto item at index | repeat [on|off] . . . . . . . . . . . . . . toggle playlist repeat | loop [on|off] . . . . . . . . . . . . . . . . toggle playlist loop | random [on|off] . . . . . . 
. . . . . . . . toggle playlist random | clear . . . . . . . . . . . . . . . . . . . . . .clear the playlist | status . . . . . . . . . . . . . . . . . . . current playlist status | title [X] . . . . . . . . . . . . . . set/get title in current item | title_n . . . . . . . . . . . . . . . . next title in current item | title_p . . . . . . . . . . . . . . previous title in current item | chapter [X] . . . . . . . . . . . . set/get chapter in current item | chapter_n . . . . . . . . . . . . . . next chapter in current item | chapter_p . . . . . . . . . . . . previous chapter in current item | | seek X . . . . . . . . . . . seek in seconds, for instance `seek 12' | pause . . . . . . . . . . . . . . . . . . . . . . . . toggle pause | fastforward . . . . . . . . . . . . . . . . . . set to maximum rate | rewind . . . . . . . . . . . . . . . . . . . . . set to minimum rate | faster . . . . . . . . . . . . . . . . . . faster playing of stream | slower . . . . . . . . . . . . . . . . . . slower playing of stream | normal . . . . . . . . . . . . . . . . . . normal playing of stream | rate [playback rate] . . . . . . . . . . set playback rate to value | frame . . . . . . . . . . . . . . . . . . . . . play frame by frame | fullscreen, f, F [on|off] . . . . . . . . . . . . toggle fullscreen | info . . . . . . . . . . . . . .information about the current stream | stats . . . . . . . . . . . . . . . . show statistical information | get_time . . . . . . . . . .seconds elapsed since stream's beginning | is_playing . . . . . . . . . . . . 1 if a stream plays, 0 otherwise | get_title . . . . . . . . . . . . . the title of the current stream | get_length . . . . . . . . . . . . the length of the current stream | | volume [X] . . . . . . . . . . . . . . . . . . set/get audio volume | volup [X] . . . . . . . . . . . . . . . .raise audio volume X steps | voldown [X] . . . . . . . . . . . . . . lower audio volume X steps | adev [X] . . . . . . . . . . . . . . . . . . . .set/get audio device | achan [X] . . . . . . . . . . . . . . . . . .set/get audio channels | atrack [X] . . . . . . . . . . . . . . . . . . . set/get audio track | vtrack [X] . . . . . . . . . . . . . . . . . . . set/get video track | vratio [X] . . . . . . . . . . . . . . . .set/get video aspect ratio | vcrop, crop [X] . . . . . . . . . . . . . . . . set/get video crop | vzoom, zoom [X] . . . . . . . . . . . . . . . . set/get video zoom | snapshot . . . . . . . . . . . . . . . . . . . . take video snapshot | strack [X] . . . . . . . . . . . . . . . . . set/get subtitles track | hotkey, key [hotkey name] . . . . . . . . . . simulate hotkey press | menu [on|off|up|down|left|right|select] . . . . . . . . . .use menu | | set [var [value]] . . . . . . . . . . . . . . . . . set/get env var | save_env . . . . . . . . . . . . save env vars (for future clients) | alias [cmd] . . . . . . . . . . . . . . . . set/get command aliases | description . . . . . . . . . . . . . . . . . .describe this module | license . . . . . . . . . . . . . . . . print VLC's license message | help, ? [pattern] . . . . . . . . . . . . . . . . . .a help message | longhelp [pattern] . . . . . . . . . . . . . . a longer help message | logout . . . . . . . . . . . . . . exit (if in a socket connection) | quit . . . . . . . . quit VLC (or logout if in a socket connection) | shutdown . . . . . . . . . . . . . . . . . . . . . . . 
.shutdown VLC +----[ end of help ] </pre> */ public String sendCommand(String s) throws IOException { if (!isConnected()) { connect(); } if (s == null) { return null; } if (!s.endsWith("\n")) { s = s + "\n"; } getOutputStream().write(s.getBytes()); getOutputStream().flush(); return s; } public static String PAUSE="pause", PLAY="play", STOP="stop", NEXT="next", PREV="prev", VOLUP="volup 1", VOLDOWN = "voldown 1"; public static final String CLIENT_MESSAGE="ClientMessage"; /*** * Reader thread. * Reads lines from the TelnetClient and echoes them * on the logger. * PropertyChangeListeners are called with CLIENT_MESSAGE and String sent from VLC. ***/ @Override public void run() { InputStream instr = staticInstance.getInputStream(); byte[] buff = new byte[1024]; int ret_read = 0; try { do { ret_read = instr.read(buff); if (ret_read > 0) { String s=new String(buff, 0, ret_read); log.info(s); staticInstance.getSupport().firePropertyChange(CLIENT_MESSAGE, null, s); // listener on static instance that actually is connected gets the message } } while (ret_read >= 0); } catch (Exception e) { log.log(Level.WARNING, "Reader ending - Exception while reading socket:{0}", e.getMessage()); } } /*** * Callback method called when TelnetClient receives an option * negotiation command. * <p> * @param negotiation_code - type of negotiation command received * (RECEIVED_DO, RECEIVED_DONT, RECEIVED_WILL, RECEIVED_WONT) * <p> * @param option_code - code of the option negotiated * <p> ***/ @Override public void receivedNegotiation(int negotiation_code, int option_code) { String command = null; if (negotiation_code == TelnetNotificationHandler.RECEIVED_DO) { command = "DO"; } else if (negotiation_code == TelnetNotificationHandler.RECEIVED_DONT) { command = "DONT"; } else if (negotiation_code == TelnetNotificationHandler.RECEIVED_WILL) { command = "WILL"; } else if (negotiation_code == TelnetNotificationHandler.RECEIVED_WONT) { command = "WONT"; } log.log(Level.INFO, "Received {0} for option code {1}", new Object[]{command, option_code}); } /** * @return the support. Listeners can get the stuff sent back from VLC with CLIENT_MESSAGE events. */ public PropertyChangeSupport getSupport() { return support; } }
src/ch/unizh/ini/jaer/projects/gesture/vlccontrol/VLCControl.java
/* * To change this template, choose Tools | Templates * and open the template in the editor. */ package ch.unizh.ini.jaer.projects.gesture.vlccontrol; import java.beans.PropertyChangeSupport; import java.io.*; import java.io.IOException; import java.nio.CharBuffer; import java.util.logging.Level; import java.util.logging.Logger; import org.apache.commons.net.telnet.*; import org.apache.commons.net.telnet.TelnetClient; /** * Exposes control of VLC media player (videolan.org) from java. VLC must be set up to open a telnet control on localhost:4444. * <p> * For VLC 1.1.5, use the following setup to expose the remote control (rc) interrface for telnet control: * <p> * This setting is in VLC Tools/Preferences/Show settings (All)/Interface/Main interfaces. Select the "Remote Control Interface" and replace "oldrc" with "rc" in the text field. * In VLC Tools/Preferences/Show settings (All)/Interface/Main interfaces/RC/TCP command input, put the string "localhost:4444" in the text field. * * @author Tobi */ public class VLCControl extends TelnetClient implements Runnable, TelnetNotificationHandler { /** VLC should be started with as "vlc --rc-host=localhost:4444" */ public static final int VLC_PORT = 4444; static final Logger log = Logger.getLogger("VLCControl"); CharBuffer cbuf = CharBuffer.allocate(1024); private static VLCControl staticInstance = null; // used to communicate among instances the active client private PropertyChangeSupport support=new PropertyChangeSupport(this); // listeners get informed by output from VLC strings public VLCControl() { } @Override public void disconnect() throws IOException { sendCommand("quit"); super.disconnect(); } public void connect() throws IOException { staticInstance = this; // used by reader to get input stream try { staticInstance.connect("localhost", VLC_PORT); Thread thread = new Thread(new VLCControl()); // starts the thread to get the text sent back from VLC thread.start(); staticInstance.registerNotifHandler(this); // notifications call back to logger Runtime.getRuntime().addShutdownHook(new Thread() { // shutdown hook here makes sure to disconnect cleanly, as long as we are not terminated @Override public void run() { try { if (isConnected()) { disconnect(); } } catch (IOException ex) { log.warning(ex.toString()); } } }); } catch (IOException e) { log.warning("couldn't connect to VLC - you may need to start VLC with command line \"vlc --rc-host=localhost:4444\""); throw new IOException(e); } } /** Sends a string command. Commands do not need to be terminated with a newline. <p> <pre> +----[ Remote control commands ] | add XYZ . . . . . . . . . . . . . . . . . . . . add XYZ to playlist | enqueue XYZ . . . . . . . . . . . . . . . . . queue XYZ to playlist | playlist . . . . . . . . . . . . . .show items currently in playlist | search [string] . . search for items in playlist (or reset search) | sort key . . . . . . . . . . . . . . . . . . . . . sort the playlist | sd [sd] . . . . . . . . . . . . . show services discovery or toggle | play . . . . . . . . . . . . . . . . . . . . . . . . . . play stream | stop . . . . . . . . . . . . . . . . . . . . . . . . . . stop stream | next . . . . . . . . . . . . . . . . . . . . . . next playlist item | prev . . . . . . . . . . . . . . . . . . . . previous playlist item | goto . . . . . . . . . . . . . . . . . . . . . . goto item at index | repeat [on|off] . . . . . . . . . . . . . . toggle playlist repeat | loop [on|off] . . . . . . . . . . . . . . . . toggle playlist loop | random [on|off] . . . . . . . . . . 
. . . . toggle playlist random | clear . . . . . . . . . . . . . . . . . . . . . .clear the playlist | status . . . . . . . . . . . . . . . . . . . current playlist status | title [X] . . . . . . . . . . . . . . set/get title in current item | title_n . . . . . . . . . . . . . . . . next title in current item | title_p . . . . . . . . . . . . . . previous title in current item | chapter [X] . . . . . . . . . . . . set/get chapter in current item | chapter_n . . . . . . . . . . . . . . next chapter in current item | chapter_p . . . . . . . . . . . . previous chapter in current item | | seek X . . . . . . . . . . . seek in seconds, for instance `seek 12' | pause . . . . . . . . . . . . . . . . . . . . . . . . toggle pause | fastforward . . . . . . . . . . . . . . . . . . set to maximum rate | rewind . . . . . . . . . . . . . . . . . . . . . set to minimum rate | faster . . . . . . . . . . . . . . . . . . faster playing of stream | slower . . . . . . . . . . . . . . . . . . slower playing of stream | normal . . . . . . . . . . . . . . . . . . normal playing of stream | rate [playback rate] . . . . . . . . . . set playback rate to value | frame . . . . . . . . . . . . . . . . . . . . . play frame by frame | fullscreen, f, F [on|off] . . . . . . . . . . . . toggle fullscreen | info . . . . . . . . . . . . . .information about the current stream | stats . . . . . . . . . . . . . . . . show statistical information | get_time . . . . . . . . . .seconds elapsed since stream's beginning | is_playing . . . . . . . . . . . . 1 if a stream plays, 0 otherwise | get_title . . . . . . . . . . . . . the title of the current stream | get_length . . . . . . . . . . . . the length of the current stream | | volume [X] . . . . . . . . . . . . . . . . . . set/get audio volume | volup [X] . . . . . . . . . . . . . . . .raise audio volume X steps | voldown [X] . . . . . . . . . . . . . . lower audio volume X steps | adev [X] . . . . . . . . . . . . . . . . . . . .set/get audio device | achan [X] . . . . . . . . . . . . . . . . . .set/get audio channels | atrack [X] . . . . . . . . . . . . . . . . . . . set/get audio track | vtrack [X] . . . . . . . . . . . . . . . . . . . set/get video track | vratio [X] . . . . . . . . . . . . . . . .set/get video aspect ratio | vcrop, crop [X] . . . . . . . . . . . . . . . . set/get video crop | vzoom, zoom [X] . . . . . . . . . . . . . . . . set/get video zoom | snapshot . . . . . . . . . . . . . . . . . . . . take video snapshot | strack [X] . . . . . . . . . . . . . . . . . set/get subtitles track | hotkey, key [hotkey name] . . . . . . . . . . simulate hotkey press | menu [on|off|up|down|left|right|select] . . . . . . . . . .use menu | | set [var [value]] . . . . . . . . . . . . . . . . . set/get env var | save_env . . . . . . . . . . . . save env vars (for future clients) | alias [cmd] . . . . . . . . . . . . . . . . set/get command aliases | description . . . . . . . . . . . . . . . . . .describe this module | license . . . . . . . . . . . . . . . . print VLC's license message | help, ? [pattern] . . . . . . . . . . . . . . . . . .a help message | longhelp [pattern] . . . . . . . . . . . . . . a longer help message | logout . . . . . . . . . . . . . . exit (if in a socket connection) | quit . . . . . . . . quit VLC (or logout if in a socket connection) | shutdown . . . . . . . . . . . . . . . . . . . . . . . 
.shutdown VLC +----[ end of help ] </pre> */ public String sendCommand(String s) throws IOException { if (!isConnected()) { connect(); } if (s == null) { return null; } if (!s.endsWith("\n")) { s = s + "\n"; } getOutputStream().write(s.getBytes()); getOutputStream().flush(); return s; } public static String PAUSE="pause", PLAY="play", STOP="stop", NEXT="next", PREV="prev", VOLUP="volup 1", VOLDOWN = "voldown 1"; public static final String CLIENT_MESSAGE="ClientMessage"; /*** * Reader thread. * Reads lines from the TelnetClient and echoes them * on the logger. * PropertyChangeListeners are called with CLIENT_MESSAGE and String sent from VLC. ***/ @Override public void run() { InputStream instr = staticInstance.getInputStream(); byte[] buff = new byte[1024]; int ret_read = 0; try { do { ret_read = instr.read(buff); if (ret_read > 0) { String s=new String(buff, 0, ret_read); log.info(s); staticInstance.getSupport().firePropertyChange(CLIENT_MESSAGE, null, s); // listener on static instance that actually is connected gets the message } } while (ret_read >= 0); } catch (Exception e) { log.log(Level.WARNING, "Reader ending - Exception while reading socket:{0}", e.getMessage()); } } /*** * Callback method called when TelnetClient receives an option * negotiation command. * <p> * @param negotiation_code - type of negotiation command received * (RECEIVED_DO, RECEIVED_DONT, RECEIVED_WILL, RECEIVED_WONT) * <p> * @param option_code - code of the option negotiated * <p> ***/ @Override public void receivedNegotiation(int negotiation_code, int option_code) { String command = null; if (negotiation_code == TelnetNotificationHandler.RECEIVED_DO) { command = "DO"; } else if (negotiation_code == TelnetNotificationHandler.RECEIVED_DONT) { command = "DONT"; } else if (negotiation_code == TelnetNotificationHandler.RECEIVED_WILL) { command = "WILL"; } else if (negotiation_code == TelnetNotificationHandler.RECEIVED_WONT) { command = "WONT"; } log.log(Level.INFO, "Received {0} for option code {1}", new Object[]{command, option_code}); } /** * @return the support. Listeners can get the stuff sent back from VLC with CLIENT_MESSAGE events. */ public PropertyChangeSupport getSupport() { return support; } }
made field private git-svn-id: e3d3b427d532171a6bd7557d8a4952a393b554a2@2477 b7f4320f-462c-0410-a916-d9f35bb82d52
src/ch/unizh/ini/jaer/projects/gesture/vlccontrol/VLCControl.java
made field private
<ide><path>src/ch/unizh/ini/jaer/projects/gesture/vlccontrol/VLCControl.java <ide> /** VLC should be started with as "vlc --rc-host=localhost:4444" */ <ide> public static final int VLC_PORT = 4444; <ide> static final Logger log = Logger.getLogger("VLCControl"); <del> CharBuffer cbuf = CharBuffer.allocate(1024); <add> private CharBuffer cbuf = CharBuffer.allocate(1024); <ide> private static VLCControl staticInstance = null; // used to communicate among instances the active client <ide> private PropertyChangeSupport support=new PropertyChangeSupport(this); // listeners get informed by output from VLC strings <ide>
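The diff above is a pure encapsulation change: the cbuf field in VLCControl gains a private modifier, with no behavioral difference. A minimal, hypothetical before/after sketch of the same narrowing follows; the BufferHolder class is illustrative and not part of the jAER source.

import java.nio.CharBuffer;

// Hypothetical illustration of the access-modifier change in the diff
// above: a package-private field is narrowed to private so that only
// the declaring class can read or write it.
class BufferHolder {
    // before the change the field was package-private:
    //   CharBuffer cbuf = CharBuffer.allocate(1024);
    private CharBuffer cbuf = CharBuffer.allocate(1024);

    int capacity() {
        return cbuf.capacity();
    }

    public static void main(String[] args) {
        System.out.println(new BufferHolder().capacity()); // prints 1024
    }
}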
Java
apache-2.0
5dfcd309f10e5bd6a918f7fdff3f44a3dff2374a
0
jcshen007/cloudstack,mufaddalq/cloudstack-datera-driver,DaanHoogland/cloudstack,DaanHoogland/cloudstack,resmo/cloudstack,jcshen007/cloudstack,DaanHoogland/cloudstack,resmo/cloudstack,GabrielBrascher/cloudstack,mufaddalq/cloudstack-datera-driver,DaanHoogland/cloudstack,mufaddalq/cloudstack-datera-driver,resmo/cloudstack,wido/cloudstack,jcshen007/cloudstack,resmo/cloudstack,GabrielBrascher/cloudstack,wido/cloudstack,jcshen007/cloudstack,GabrielBrascher/cloudstack,wido/cloudstack,jcshen007/cloudstack,DaanHoogland/cloudstack,jcshen007/cloudstack,DaanHoogland/cloudstack,wido/cloudstack,mufaddalq/cloudstack-datera-driver,wido/cloudstack,jcshen007/cloudstack,resmo/cloudstack,resmo/cloudstack,GabrielBrascher/cloudstack,GabrielBrascher/cloudstack,mufaddalq/cloudstack-datera-driver,GabrielBrascher/cloudstack,mufaddalq/cloudstack-datera-driver,wido/cloudstack,DaanHoogland/cloudstack,wido/cloudstack,GabrielBrascher/cloudstack,resmo/cloudstack
// Licensed to the Apache Software Foundation (ASF) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The ASF licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. package com.cloud.hypervisor.kvm.resource; import java.io.BufferedReader; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.FileReader; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.net.InetAddress; import java.net.URISyntaxException; import java.net.URL; import java.net.URLConnection; import java.text.DateFormat; import java.text.MessageFormat; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Arrays; import java.util.Calendar; import java.util.Collections; import java.util.Comparator; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.UUID; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; import java.util.regex.Matcher; import java.util.regex.Pattern; import javax.ejb.Local; import javax.naming.ConfigurationException; import org.apache.log4j.Logger; import org.libvirt.Connect; import org.libvirt.Domain; import org.libvirt.DomainInfo; import org.libvirt.DomainInterfaceStats; import org.libvirt.DomainSnapshot; import org.libvirt.LibvirtException; import org.libvirt.NodeInfo; import com.cloud.agent.api.Answer; import com.cloud.agent.api.AttachIsoCommand; import com.cloud.agent.api.AttachVolumeAnswer; import com.cloud.agent.api.AttachVolumeCommand; import com.cloud.agent.api.BackupSnapshotAnswer; import com.cloud.agent.api.BackupSnapshotCommand; import com.cloud.agent.api.CheckHealthAnswer; import com.cloud.agent.api.CheckHealthCommand; import com.cloud.agent.api.CheckNetworkAnswer; import com.cloud.agent.api.CheckNetworkCommand; import com.cloud.agent.api.CheckStateCommand; import com.cloud.agent.api.CheckVirtualMachineAnswer; import com.cloud.agent.api.CheckVirtualMachineCommand; import com.cloud.agent.api.CleanupNetworkRulesCmd; import com.cloud.agent.api.Command; import com.cloud.agent.api.CreatePrivateTemplateFromSnapshotCommand; import com.cloud.agent.api.CreatePrivateTemplateFromVolumeCommand; import com.cloud.agent.api.CreateStoragePoolCommand; import com.cloud.agent.api.CreateVolumeFromSnapshotAnswer; import com.cloud.agent.api.CreateVolumeFromSnapshotCommand; import com.cloud.agent.api.DeleteSnapshotBackupAnswer; import com.cloud.agent.api.DeleteSnapshotBackupCommand; import com.cloud.agent.api.DeleteSnapshotsDirCommand; import com.cloud.agent.api.DeleteStoragePoolCommand; import com.cloud.agent.api.FenceAnswer; import com.cloud.agent.api.FenceCommand; import 
com.cloud.agent.api.GetHostStatsAnswer; import com.cloud.agent.api.GetHostStatsCommand; import com.cloud.agent.api.GetStorageStatsAnswer; import com.cloud.agent.api.GetStorageStatsCommand; import com.cloud.agent.api.GetVmStatsAnswer; import com.cloud.agent.api.GetVmStatsCommand; import com.cloud.agent.api.GetVncPortAnswer; import com.cloud.agent.api.GetVncPortCommand; import com.cloud.agent.api.HostStatsEntry; import com.cloud.agent.api.MaintainAnswer; import com.cloud.agent.api.MaintainCommand; import com.cloud.agent.api.ManageSnapshotAnswer; import com.cloud.agent.api.ManageSnapshotCommand; import com.cloud.agent.api.MigrateAnswer; import com.cloud.agent.api.MigrateCommand; import com.cloud.agent.api.ModifySshKeysCommand; import com.cloud.agent.api.ModifyStoragePoolAnswer; import com.cloud.agent.api.ModifyStoragePoolCommand; import com.cloud.agent.api.NetworkRulesSystemVmCommand; import com.cloud.agent.api.NetworkUsageAnswer; import com.cloud.agent.api.NetworkUsageCommand; import com.cloud.agent.api.PingCommand; import com.cloud.agent.api.PingRoutingCommand; import com.cloud.agent.api.PingRoutingWithNwGroupsCommand; import com.cloud.agent.api.PingTestCommand; import com.cloud.agent.api.PlugNicAnswer; import com.cloud.agent.api.PlugNicCommand; import com.cloud.agent.api.PrepareForMigrationAnswer; import com.cloud.agent.api.PrepareForMigrationCommand; import com.cloud.agent.api.ReadyAnswer; import com.cloud.agent.api.ReadyCommand; import com.cloud.agent.api.RebootAnswer; import com.cloud.agent.api.RebootCommand; import com.cloud.agent.api.RebootRouterCommand; import com.cloud.agent.api.SecurityGroupRuleAnswer; import com.cloud.agent.api.SecurityGroupRulesCmd; import com.cloud.agent.api.SetupGuestNetworkAnswer; import com.cloud.agent.api.SetupGuestNetworkCommand; import com.cloud.agent.api.StartAnswer; import com.cloud.agent.api.StartCommand; import com.cloud.agent.api.StartupCommand; import com.cloud.agent.api.StartupRoutingCommand; import com.cloud.agent.api.StartupStorageCommand; import com.cloud.agent.api.StopAnswer; import com.cloud.agent.api.StopCommand; import com.cloud.agent.api.UnPlugNicAnswer; import com.cloud.agent.api.UnPlugNicCommand; import com.cloud.agent.api.UpgradeSnapshotCommand; import com.cloud.agent.api.VmStatsEntry; import com.cloud.agent.api.check.CheckSshAnswer; import com.cloud.agent.api.check.CheckSshCommand; import com.cloud.agent.api.proxy.CheckConsoleProxyLoadCommand; import com.cloud.agent.api.proxy.ConsoleProxyLoadAnswer; import com.cloud.agent.api.proxy.WatchConsoleProxyLoadCommand; import com.cloud.agent.api.routing.IpAssocAnswer; import com.cloud.agent.api.routing.IpAssocCommand; import com.cloud.agent.api.routing.IpAssocVpcCommand; import com.cloud.agent.api.routing.NetworkElementCommand; import com.cloud.agent.api.routing.SetNetworkACLAnswer; import com.cloud.agent.api.routing.SetNetworkACLCommand; import com.cloud.agent.api.routing.SetSourceNatAnswer; import com.cloud.agent.api.routing.SetSourceNatCommand; import com.cloud.agent.api.storage.CopyVolumeAnswer; import com.cloud.agent.api.storage.CopyVolumeCommand; import com.cloud.agent.api.storage.CreateAnswer; import com.cloud.agent.api.storage.CreateCommand; import com.cloud.agent.api.storage.CreatePrivateTemplateAnswer; import com.cloud.agent.api.storage.DestroyCommand; import com.cloud.agent.api.storage.PrimaryStorageDownloadAnswer; import com.cloud.agent.api.storage.PrimaryStorageDownloadCommand; import com.cloud.agent.api.storage.ResizeVolumeCommand; import 
com.cloud.agent.api.storage.ResizeVolumeAnswer; import com.cloud.agent.api.to.IpAddressTO; import com.cloud.agent.api.to.NicTO; import com.cloud.agent.api.to.StorageFilerTO; import com.cloud.agent.api.to.VirtualMachineTO; import com.cloud.agent.api.to.VolumeTO; import com.cloud.agent.resource.virtualnetwork.VirtualRoutingResource; import com.cloud.dc.Vlan; import com.cloud.exception.InternalErrorException; import com.cloud.host.Host.Type; import com.cloud.hypervisor.Hypervisor.HypervisorType; import com.cloud.hypervisor.kvm.resource.KVMHABase.NfsStoragePool; import com.cloud.hypervisor.kvm.resource.LibvirtVMDef.ClockDef; import com.cloud.hypervisor.kvm.resource.LibvirtVMDef.ConsoleDef; import com.cloud.hypervisor.kvm.resource.LibvirtVMDef.CpuTuneDef; import com.cloud.hypervisor.kvm.resource.LibvirtVMDef.DevicesDef; import com.cloud.hypervisor.kvm.resource.LibvirtVMDef.DiskDef; import com.cloud.hypervisor.kvm.resource.LibvirtVMDef.DiskDef.diskProtocol; import com.cloud.hypervisor.kvm.resource.LibvirtVMDef.FeaturesDef; import com.cloud.hypervisor.kvm.resource.LibvirtVMDef.GraphicDef; import com.cloud.hypervisor.kvm.resource.LibvirtVMDef.GuestDef; import com.cloud.hypervisor.kvm.resource.LibvirtVMDef.GuestResourceDef; import com.cloud.hypervisor.kvm.resource.LibvirtVMDef.InputDef; import com.cloud.hypervisor.kvm.resource.LibvirtVMDef.InterfaceDef; import com.cloud.hypervisor.kvm.resource.LibvirtVMDef.InterfaceDef.hostNicType; import com.cloud.hypervisor.kvm.resource.LibvirtVMDef.SerialDef; import com.cloud.hypervisor.kvm.resource.LibvirtVMDef.TermPolicy; import com.cloud.hypervisor.kvm.storage.KVMPhysicalDisk; import com.cloud.hypervisor.kvm.storage.KVMPhysicalDisk.PhysicalDiskFormat; import com.cloud.hypervisor.kvm.storage.KVMStoragePool; import com.cloud.hypervisor.kvm.storage.KVMStoragePoolManager; import com.cloud.network.Networks.BroadcastDomainType; import com.cloud.network.Networks.IsolationType; import com.cloud.network.Networks.RouterPrivateIpStrategy; import com.cloud.network.Networks.TrafficType; import com.cloud.network.PhysicalNetworkSetupInfo; import com.cloud.resource.ServerResource; import com.cloud.resource.ServerResourceBase; import com.cloud.storage.JavaStorageLayer; import com.cloud.storage.Storage; import com.cloud.storage.Storage.ImageFormat; import com.cloud.storage.Storage.StoragePoolType; import com.cloud.storage.StorageLayer; import com.cloud.storage.Volume; import com.cloud.storage.template.Processor; import com.cloud.storage.template.Processor.FormatInfo; import com.cloud.storage.template.QCOW2Processor; import com.cloud.storage.template.TemplateInfo; import com.cloud.storage.template.TemplateLocation; import com.cloud.utils.NumbersUtil; import com.cloud.utils.Pair; import com.cloud.utils.FileUtil; import com.cloud.utils.PropertiesUtil; import com.cloud.utils.exception.CloudRuntimeException; import com.cloud.utils.net.NetUtils; import com.cloud.utils.script.OutputInterpreter; import com.cloud.utils.script.Script; import com.cloud.vm.DiskProfile; import com.cloud.vm.VirtualMachine; import com.cloud.vm.VirtualMachine.State; import com.cloud.vm.VirtualMachineName; /** * LibvirtComputingResource execute requests on the computing/routing host using * the libvirt API * * @config {@table || Param Name | Description | Values | Default || || * hypervisor.type | type of local hypervisor | string | kvm || || * hypervisor.uri | local hypervisor to connect to | URI | * qemu:///system || || domr.arch | instruction set for domr template | * string | i686 || || private.bridge.name 
| private bridge where the * domrs have their private interface | string | vmops0 || || * public.bridge.name | public bridge where the domrs have their public * interface | string | br0 || || private.network.name | name of the * network where the domrs have their private interface | string | * vmops-private || || private.ipaddr.start | start of the range of * private ip addresses for domrs | ip address | 192.168.166.128 || || * private.ipaddr.end | end of the range of private ip addresses for * domrs | ip address | start + 126 || || private.macaddr.start | start * of the range of private mac addresses for domrs | mac address | * 00:16:3e:77:e2:a0 || || private.macaddr.end | end of the range of * private mac addresses for domrs | mac address | start + 126 || || * pool | the parent of the storage pool hierarchy * } **/ @Local(value = { ServerResource.class }) public class LibvirtComputingResource extends ServerResourceBase implements ServerResource { private static final Logger s_logger = Logger .getLogger(LibvirtComputingResource.class); private String _modifyVlanPath; private String _versionstringpath; private String _patchdomrPath; private String _createvmPath; private String _manageSnapshotPath; private String _resizeVolumePath; private String _createTmplPath; private String _heartBeatPath; private String _securityGroupPath; private String _routerProxyPath; private String _host; private String _dcId; private String _pod; private String _clusterId; private int _migrateSpeed; private long _hvVersion; private KVMHAMonitor _monitor; private final String _SSHKEYSPATH = "/root/.ssh"; private final String _SSHPRVKEYPATH = _SSHKEYSPATH + File.separator + "id_rsa.cloud"; private final String _SSHPUBKEYPATH = _SSHKEYSPATH + File.separator + "id_rsa.pub.cloud"; private String _mountPoint = "/mnt"; StorageLayer _storage; private KVMStoragePoolManager _storagePoolMgr; private VifDriver _vifDriver; private static final class KeyValueInterpreter extends OutputInterpreter { private final Map<String, String> map = new HashMap<String, String>(); @Override public String interpret(BufferedReader reader) throws IOException { String line = null; int numLines = 0; while ((line = reader.readLine()) != null) { String[] toks = line.trim().split("="); if (toks.length < 2) { s_logger.warn("Failed to parse Script output: " + line); } else { map.put(toks[0].trim(), toks[1].trim()); } numLines++; } if (numLines == 0) { s_logger.warn("KeyValueInterpreter: no output lines?"); } return null; } public Map<String, String> getKeyValues() { return map; } } @Override protected String getDefaultScriptsDir() { return null; } protected static MessageFormat SnapshotXML = new MessageFormat( " <domainsnapshot>" + " <name>{0}</name>" + " <domain>" + " <uuid>{1}</uuid>" + " </domain>" + " </domainsnapshot>"); protected String _hypervisorType; protected String _hypervisorURI; protected String _hypervisorPath; protected String _sysvmISOPath; protected String _privNwName; protected String _privBridgeName; protected String _linkLocalBridgeName; protected String _publicBridgeName; protected String _guestBridgeName; protected String _privateIp; protected String _pool; protected String _localGateway; private boolean _can_bridge_firewall; protected String _localStoragePath; protected String _localStorageUUID; private final Map <String, String> _pifs = new HashMap<String, String>(); private final Map<String, Map<String, String>> hostNetInfo = new HashMap<String, Map<String, String>>(); private final Map<String, vmStats> _vmStats = new 
ConcurrentHashMap<String, vmStats>(); protected boolean _disconnected = true; protected int _timeout; protected int _cmdsTimeout; protected int _stopTimeout; protected static HashMap<DomainInfo.DomainState, State> s_statesTable; static { s_statesTable = new HashMap<DomainInfo.DomainState, State>(); s_statesTable.put(DomainInfo.DomainState.VIR_DOMAIN_SHUTOFF, State.Stopped); s_statesTable.put(DomainInfo.DomainState.VIR_DOMAIN_PAUSED, State.Running); s_statesTable.put(DomainInfo.DomainState.VIR_DOMAIN_RUNNING, State.Running); s_statesTable.put(DomainInfo.DomainState.VIR_DOMAIN_BLOCKED, State.Running); s_statesTable.put(DomainInfo.DomainState.VIR_DOMAIN_NOSTATE, State.Unknown); s_statesTable.put(DomainInfo.DomainState.VIR_DOMAIN_SHUTDOWN, State.Stopping); } protected HashMap<String, State> _vms = new HashMap<String, State>(20); protected List<String> _vmsKilled = new ArrayList<String>(); private VirtualRoutingResource _virtRouterResource; private String _pingTestPath; private int _dom0MinMem; protected enum BridgeType { NATIVE, OPENVSWITCH } protected BridgeType _bridgeType; private String getEndIpFromStartIp(String startIp, int numIps) { String[] tokens = startIp.split("[.]"); assert (tokens.length == 4); int lastbyte = Integer.parseInt(tokens[3]); lastbyte = lastbyte + numIps; tokens[3] = Integer.toString(lastbyte); StringBuilder end = new StringBuilder(15); end.append(tokens[0]).append(".").append(tokens[1]).append(".") .append(tokens[2]).append(".").append(tokens[3]); return end.toString(); } private Map<String, Object> getDeveloperProperties() throws ConfigurationException { final File file = PropertiesUtil.findConfigFile("developer.properties"); if (file == null) { throw new ConfigurationException( "Unable to find developer.properties."); } s_logger.info("developer.properties found at " + file.getAbsolutePath()); Properties properties = new Properties(); try { properties.load(new FileInputStream(file)); String startMac = (String) properties.get("private.macaddr.start"); if (startMac == null) { throw new ConfigurationException( "Developers must specify start mac for private ip range"); } String startIp = (String) properties.get("private.ipaddr.start"); if (startIp == null) { throw new ConfigurationException( "Developers must specify start ip for private ip range"); } final Map<String, Object> params = PropertiesUtil.toMap(properties); String endIp = (String) properties.get("private.ipaddr.end"); if (endIp == null) { endIp = getEndIpFromStartIp(startIp, 16); params.put("private.ipaddr.end", endIp); } return params; } catch (final FileNotFoundException ex) { throw new CloudRuntimeException("Cannot find the file: " + file.getAbsolutePath(), ex); } catch (final IOException ex) { throw new CloudRuntimeException("IOException in reading " + file.getAbsolutePath(), ex); } } protected String getDefaultNetworkScriptsDir() { return "scripts/vm/network/vnet"; } protected String getDefaultStorageScriptsDir() { return "scripts/storage/qcow2"; } protected String getDefaultKvmScriptsDir() { return "scripts/vm/hypervisor/kvm"; } protected String getDefaultDomrScriptsDir() { return "scripts/network/domr/kvm"; } @Override public boolean configure(String name, Map<String, Object> params) throws ConfigurationException { boolean success = super.configure(name, params); if (!success) { return false; } _storage = new JavaStorageLayer(); _storage.configure("StorageLayer", params); String domrScriptsDir = (String) params.get("domr.scripts.dir"); if (domrScriptsDir == null) { domrScriptsDir = 
getDefaultDomrScriptsDir(); } String kvmScriptsDir = (String) params.get("kvm.scripts.dir"); if (kvmScriptsDir == null) { kvmScriptsDir = getDefaultKvmScriptsDir(); } String networkScriptsDir = (String) params.get("network.scripts.dir"); if (networkScriptsDir == null) { networkScriptsDir = getDefaultNetworkScriptsDir(); } String storageScriptsDir = (String) params.get("storage.scripts.dir"); if (storageScriptsDir == null) { storageScriptsDir = getDefaultStorageScriptsDir(); } String bridgeType = (String) params.get("network.bridge.type"); if (bridgeType == null) { _bridgeType = BridgeType.NATIVE; } else { _bridgeType = BridgeType.valueOf(bridgeType.toUpperCase()); } params.put("domr.scripts.dir", domrScriptsDir); _virtRouterResource = new VirtualRoutingResource(); success = _virtRouterResource.configure(name, params); if (!success) { return false; } _host = (String) params.get("host"); if (_host == null) { _host = "localhost"; } _dcId = (String) params.get("zone"); if (_dcId == null) { _dcId = "default"; } _pod = (String) params.get("pod"); if (_pod == null) { _pod = "default"; } _clusterId = (String) params.get("cluster"); _modifyVlanPath = Script.findScript(networkScriptsDir, "modifyvlan.sh"); if (_modifyVlanPath == null) { throw new ConfigurationException("Unable to find modifyvlan.sh"); } _versionstringpath = Script.findScript(kvmScriptsDir, "versions.sh"); if (_versionstringpath == null) { throw new ConfigurationException("Unable to find versions.sh"); } _patchdomrPath = Script.findScript(kvmScriptsDir + "/patch/", "rundomrpre.sh"); if (_patchdomrPath == null) { throw new ConfigurationException("Unable to find rundomrpre.sh"); } _heartBeatPath = Script.findScript(kvmScriptsDir, "kvmheartbeat.sh"); if (_heartBeatPath == null) { throw new ConfigurationException("Unable to find kvmheartbeat.sh"); } _createvmPath = Script.findScript(storageScriptsDir, "createvm.sh"); if (_createvmPath == null) { throw new ConfigurationException("Unable to find the createvm.sh"); } _manageSnapshotPath = Script.findScript(storageScriptsDir, "managesnapshot.sh"); if (_manageSnapshotPath == null) { throw new ConfigurationException( "Unable to find the managesnapshot.sh"); } _resizeVolumePath = Script.findScript(storageScriptsDir, "resizevolume.sh"); if (_resizeVolumePath == null) { throw new ConfigurationException( "Unable to find the resizevolume.sh"); } _createTmplPath = Script .findScript(storageScriptsDir, "createtmplt.sh"); if (_createTmplPath == null) { throw new ConfigurationException( "Unable to find the createtmplt.sh"); } _securityGroupPath = Script.findScript(networkScriptsDir, "security_group.py"); if (_securityGroupPath == null) { throw new ConfigurationException( "Unable to find the security_group.py"); } _routerProxyPath = Script.findScript("scripts/network/domr/", "router_proxy.sh"); if (_routerProxyPath == null) { throw new ConfigurationException( "Unable to find the router_proxy.sh"); } String value = (String) params.get("developer"); boolean isDeveloper = Boolean.parseBoolean(value); if (isDeveloper) { params.putAll(getDeveloperProperties()); } _pool = (String) params.get("pool"); if (_pool == null) { _pool = "/root"; } String instance = (String) params.get("instance"); _hypervisorType = (String) params.get("hypervisor.type"); if (_hypervisorType == null) { _hypervisorType = "kvm"; } _hypervisorURI = (String) params.get("hypervisor.uri"); if (_hypervisorURI == null) { _hypervisorURI = "qemu:///system"; } String startMac = (String) params.get("private.macaddr.start"); if (startMac == null) { 
startMac = "00:16:3e:77:e2:a0"; } String startIp = (String) params.get("private.ipaddr.start"); if (startIp == null) { startIp = "192.168.166.128"; } _pingTestPath = Script.findScript(kvmScriptsDir, "pingtest.sh"); if (_pingTestPath == null) { throw new ConfigurationException("Unable to find the pingtest.sh"); } _linkLocalBridgeName = (String) params.get("private.bridge.name"); if (_linkLocalBridgeName == null) { if (isDeveloper) { _linkLocalBridgeName = "cloud-" + instance + "-0"; } else { _linkLocalBridgeName = "cloud0"; } } _publicBridgeName = (String) params.get("public.network.device"); if (_publicBridgeName == null) { _publicBridgeName = "cloudbr0"; } _privBridgeName = (String) params.get("private.network.device"); if (_privBridgeName == null) { _privBridgeName = "cloudbr1"; } _guestBridgeName = (String) params.get("guest.network.device"); if (_guestBridgeName == null) { _guestBridgeName = _privBridgeName; } _privNwName = (String) params.get("private.network.name"); if (_privNwName == null) { if (isDeveloper) { _privNwName = "cloud-" + instance + "-private"; } else { _privNwName = "cloud-private"; } } _localStoragePath = (String) params.get("local.storage.path"); if (_localStoragePath == null) { _localStoragePath = "/var/lib/libvirt/images/"; } _localStorageUUID = (String) params.get("local.storage.uuid"); if (_localStorageUUID == null) { throw new ConfigurationException("local.storage.uuid is not set! Please set this to a valid UUID"); } value = (String) params.get("scripts.timeout"); _timeout = NumbersUtil.parseInt(value, 30 * 60) * 1000; value = (String) params.get("stop.script.timeout"); _stopTimeout = NumbersUtil.parseInt(value, 120) * 1000; value = (String) params.get("cmds.timeout"); _cmdsTimeout = NumbersUtil.parseInt(value, 7200) * 1000; value = (String) params.get("host.reserved.mem.mb"); _dom0MinMem = NumbersUtil.parseInt(value, 0) * 1024 * 1024; LibvirtConnection.initialize(_hypervisorURI); Connect conn = null; try { conn = LibvirtConnection.getConnection(); if (_bridgeType == BridgeType.OPENVSWITCH) { if (conn.getLibVirVersion() < (9 * 1000 + 11)) { throw new ConfigurationException("LibVirt version 0.9.11 required for openvswitch support, but version " + conn.getLibVirVersion() + " detected"); } } } catch (LibvirtException e) { throw new CloudRuntimeException(e.getMessage()); } /* Does node support HVM guest? If not, exit */ if (!IsHVMEnabled(conn)) { throw new ConfigurationException( "NO HVM support on this machine, please make sure: " + "1. VT/SVM is supported by your CPU, or is enabled in BIOS. " + "2. 
kvm modules are loaded (kvm, kvm_amd|kvm_intel)"); } _hypervisorPath = getHypervisorPath(conn); try { _hvVersion = conn.getVersion(); _hvVersion = (_hvVersion % 1000000) / 1000; } catch (LibvirtException e) { } String[] info = NetUtils.getNetworkParams(_privateNic); _monitor = new KVMHAMonitor(null, info[0], _heartBeatPath); Thread ha = new Thread(_monitor); ha.start(); _storagePoolMgr = new KVMStoragePoolManager(_storage, _monitor); _sysvmISOPath = (String) params.get("systemvm.iso.path"); if (_sysvmISOPath == null) { String[] isoPaths = { "/usr/lib64/cloud/agent/vms/systemvm.iso", "/usr/lib/cloud/agent/vms/systemvm.iso", "/usr/lib64/cloud/common/vms/systemvm.iso", "/usr/lib/cloud/common/vms/systemvm.iso" }; for (String isoPath : isoPaths) { if (_storage.exists(isoPath)) { _sysvmISOPath = isoPath; break; } } if (_sysvmISOPath == null) { s_logger.debug("Can't find system vm ISO"); } } switch (_bridgeType) { case OPENVSWITCH: getOvsPifs(); break; case NATIVE: default: getPifs(); break; } if (_pifs.get("private") == null) { s_logger.debug("Failed to get private nic name"); throw new ConfigurationException("Failed to get private nic name"); } if (_pifs.get("public") == null) { s_logger.debug("Failed to get public nic name"); throw new ConfigurationException("Failed to get public nic name"); } s_logger.debug("Found pif: " + _pifs.get("private") + " on " + _privBridgeName + ", pif: " + _pifs.get("public") + " on " + _publicBridgeName); _can_bridge_firewall = can_bridge_firewall(_pifs.get("public")); _localGateway = Script .runSimpleBashScript("ip route |grep default|awk '{print $3}'"); if (_localGateway == null) { s_logger.debug("Failed to found the local gateway"); } _mountPoint = (String) params.get("mount.path"); if (_mountPoint == null) { _mountPoint = "/mnt"; } value = (String) params.get("vm.migrate.speed"); _migrateSpeed = NumbersUtil.parseInt(value, -1); if (_migrateSpeed == -1) { //get guest network device speed _migrateSpeed = 0; String speed = Script.runSimpleBashScript("ethtool " + _pifs.get("public") + " |grep Speed | cut -d \\ -f 2"); if (speed != null) { String[] tokens = speed.split("M"); if (tokens.length == 2) { try { _migrateSpeed = Integer.parseInt(tokens[0]); } catch (Exception e) { } s_logger.debug("device " + _pifs.get("public") + " has speed: " + String.valueOf(_migrateSpeed)); } } params.put("vm.migrate.speed", String.valueOf(_migrateSpeed)); } Map<String, String> bridges = new HashMap<String, String>(); bridges.put("linklocal", _linkLocalBridgeName); bridges.put("public", _publicBridgeName); bridges.put("private", _privBridgeName); bridges.put("guest", _guestBridgeName); params.put("libvirt.host.bridges", bridges); params.put("libvirt.host.pifs", _pifs); // Load the vif driver String vifDriverName = (String) params.get("libvirt.vif.driver"); if (vifDriverName == null) { if (_bridgeType == BridgeType.OPENVSWITCH) { s_logger.info("No libvirt.vif.driver specififed. Defaults to OvsVifDriver."); vifDriverName = "com.cloud.hypervisor.kvm.resource.OvsVifDriver"; } else { s_logger.info("No libvirt.vif.driver specififed. 
Defaults to BridgeVifDriver."); vifDriverName = "com.cloud.hypervisor.kvm.resource.BridgeVifDriver"; } } params.put("libvirt.computing.resource", this); try { Class<?> clazz = Class.forName(vifDriverName); _vifDriver = (VifDriver) clazz.newInstance(); _vifDriver.configure(params); } catch (ClassNotFoundException e) { throw new ConfigurationException("Unable to find class for libvirt.vif.driver " + e); } catch (InstantiationException e) { throw new ConfigurationException("Unable to instantiate class for libvirt.vif.driver " + e); } catch (Exception e) { throw new ConfigurationException("Failed to initialize libvirt.vif.driver " + e); } return true; } private void getPifs() { File dir = new File("/sys/devices/virtual/net"); File[] netdevs = dir.listFiles(); List<String> bridges = new ArrayList<String>(); for (int i = 0; i < netdevs.length; i++) { File isbridge = new File(netdevs[i].getAbsolutePath() + "/bridge"); String netdevName = netdevs[i].getName(); s_logger.debug("looking in file " + netdevs[i].getAbsolutePath() + "/bridge"); if (isbridge.exists()) { s_logger.debug("Found bridge " + netdevName); bridges.add(netdevName); } } for (String bridge : bridges) { s_logger.debug("looking for pif for bridge " + bridge); String pif = getPif(bridge); if(_publicBridgeName != null && bridge.equals(_publicBridgeName)){ _pifs.put("public", pif); } if (_guestBridgeName != null && bridge.equals(_guestBridgeName)) { _pifs.put("private", pif); } _pifs.put(bridge, pif); } s_logger.debug("done looking for pifs, no more bridges"); } private void getOvsPifs() { String cmdout = Script.runSimpleBashScript("ovs-vsctl list-br | sed '{:q;N;s/\\n/%/g;t q}'"); s_logger.debug("cmdout was " + cmdout); List<String> bridges = Arrays.asList(cmdout.split("%")); for (String bridge : bridges) { s_logger.debug("looking for pif for bridge " + bridge); // String pif = getOvsPif(bridge); // Not really interested in the pif name at this point for ovs // bridges String pif = bridge; if (_publicBridgeName != null && bridge.equals(_publicBridgeName)) { _pifs.put("public", pif); } if (_guestBridgeName != null && bridge.equals(_guestBridgeName)) { _pifs.put("private", pif); } _pifs.put(bridge, pif); } s_logger.debug("done looking for pifs, no more bridges"); } private String getPif(String bridge) { String pif = matchPifFileInDirectory(bridge); File vlanfile = new File("/proc/net/vlan" + pif); if (vlanfile.isFile()) { pif = Script.runSimpleBashScript("grep ^Device\\: /proc/net/vlan/" + pif + " | awk {'print $2'}"); } return pif; } private String getOvsPif(String bridge) { String pif = Script.runSimpleBashScript("ovs-vsctl list-ports " + bridge); return pif; } private String matchPifFileInDirectory(String bridgeName){ File f = new File("/sys/devices/virtual/net/" + bridgeName + "/brif"); if (! 
f.isDirectory()){ s_logger.debug("failing to get physical interface from bridge" + bridgeName + ", does " + f.getAbsolutePath() + "exist?"); return ""; } File[] interfaces = f.listFiles(); for (int i = 0; i < interfaces.length; i++) { String fname = interfaces[i].getName(); s_logger.debug("matchPifFileInDirectory: file name '"+fname+"'"); if (fname.startsWith("eth") || fname.startsWith("bond") || fname.startsWith("vlan") || fname.startsWith("em")) { return fname; } } s_logger.debug("failing to get physical interface from bridge" + bridgeName + ", did not find an eth*, bond*, or vlan* in " + f.getAbsolutePath()); return ""; } private boolean checkNetwork(String networkName) { if (networkName == null) { return true; } if (_bridgeType == BridgeType.OPENVSWITCH) { return checkOvsNetwork(networkName); } else { return checkBridgeNetwork(networkName); } } private boolean checkBridgeNetwork(String networkName) { if (networkName == null) { return true; } String name = matchPifFileInDirectory(networkName); if (name == null || name.isEmpty()) { return false; } else { return true; } } private boolean checkOvsNetwork(String networkName) { s_logger.debug("Checking if network " + networkName + " exists as openvswitch bridge"); if (networkName == null) { return true; } Script command = new Script("/bin/sh", _timeout); command.add("-c"); command.add("ovs-vsctl br-exists " + networkName); String result = command.execute(null); if ("Ok".equals(result)) { return true; } else { return false; } } private String getVnetId(String vnetId) { return vnetId; } private void patchSystemVm(String cmdLine, String dataDiskPath, String vmName) throws InternalErrorException { String result; final Script command = new Script(_patchdomrPath, _timeout, s_logger); command.add("-l", vmName); command.add("-t", "all"); command.add("-d", dataDiskPath); command.add("-p", cmdLine.replaceAll(" ", "%")); result = command.execute(); if (result != null) { throw new InternalErrorException(result); } } boolean isDirectAttachedNetwork(String type) { if ("untagged".equalsIgnoreCase(type)) { return true; } else { try { Long.valueOf(type); } catch (NumberFormatException e) { return true; } return false; } } protected String startDomain(Connect conn, String vmName, String domainXML) throws LibvirtException, InternalErrorException { Domain dm = null; try { /* We create a transient domain here. When this method gets called we receive a full XML specification of the guest, so no need to define it persistent. This also makes sure we never have any old "garbage" defined in libvirt which might haunt us. 
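           A transient domain created via domainCreateXML() is defined and booted in a single
           libvirt call, and it disappears from libvirt once it is destroyed or shut down.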
*/ dm = conn.domainCreateXML(domainXML, 0); } catch (final LibvirtException e) { s_logger.warn("Failed to start domain " + vmName + ": " + e.getMessage()); } return null; } @Override public boolean stop() { try { Connect conn = LibvirtConnection.getConnection(); conn.close(); } catch (LibvirtException e) { } return true; } @Override public Answer executeRequest(Command cmd) { try { if (cmd instanceof StopCommand) { return execute((StopCommand) cmd); } else if (cmd instanceof GetVmStatsCommand) { return execute((GetVmStatsCommand) cmd); } else if (cmd instanceof RebootRouterCommand) { return execute((RebootRouterCommand) cmd); } else if (cmd instanceof RebootCommand) { return execute((RebootCommand) cmd); } else if (cmd instanceof GetHostStatsCommand) { return execute((GetHostStatsCommand) cmd); } else if (cmd instanceof CheckStateCommand) { return executeRequest(cmd); } else if (cmd instanceof CheckHealthCommand) { return execute((CheckHealthCommand) cmd); } else if (cmd instanceof PrepareForMigrationCommand) { return execute((PrepareForMigrationCommand) cmd); } else if (cmd instanceof MigrateCommand) { return execute((MigrateCommand) cmd); } else if (cmd instanceof PingTestCommand) { return execute((PingTestCommand) cmd); } else if (cmd instanceof CheckVirtualMachineCommand) { return execute((CheckVirtualMachineCommand) cmd); } else if (cmd instanceof ReadyCommand) { return execute((ReadyCommand) cmd); } else if (cmd instanceof AttachIsoCommand) { return execute((AttachIsoCommand) cmd); } else if (cmd instanceof AttachVolumeCommand) { return execute((AttachVolumeCommand) cmd); } else if (cmd instanceof StopCommand) { return execute((StopCommand) cmd); } else if (cmd instanceof CheckConsoleProxyLoadCommand) { return execute((CheckConsoleProxyLoadCommand) cmd); } else if (cmd instanceof WatchConsoleProxyLoadCommand) { return execute((WatchConsoleProxyLoadCommand) cmd); } else if (cmd instanceof GetVncPortCommand) { return execute((GetVncPortCommand) cmd); } else if (cmd instanceof ModifySshKeysCommand) { return execute((ModifySshKeysCommand) cmd); } else if (cmd instanceof MaintainCommand) { return execute((MaintainCommand) cmd); } else if (cmd instanceof CreateCommand) { return execute((CreateCommand) cmd); } else if (cmd instanceof DestroyCommand) { return execute((DestroyCommand) cmd); } else if (cmd instanceof PrimaryStorageDownloadCommand) { return execute((PrimaryStorageDownloadCommand) cmd); } else if (cmd instanceof CreatePrivateTemplateFromVolumeCommand) { return execute((CreatePrivateTemplateFromVolumeCommand) cmd); } else if (cmd instanceof GetStorageStatsCommand) { return execute((GetStorageStatsCommand) cmd); } else if (cmd instanceof ManageSnapshotCommand) { return execute((ManageSnapshotCommand) cmd); } else if (cmd instanceof BackupSnapshotCommand) { return execute((BackupSnapshotCommand) cmd); } else if (cmd instanceof CreateVolumeFromSnapshotCommand) { return execute((CreateVolumeFromSnapshotCommand) cmd); } else if (cmd instanceof CreatePrivateTemplateFromSnapshotCommand) { return execute((CreatePrivateTemplateFromSnapshotCommand) cmd); } else if (cmd instanceof UpgradeSnapshotCommand) { return execute((UpgradeSnapshotCommand) cmd); } else if (cmd instanceof CreateStoragePoolCommand) { return execute((CreateStoragePoolCommand) cmd); } else if (cmd instanceof ModifyStoragePoolCommand) { return execute((ModifyStoragePoolCommand) cmd); } else if (cmd instanceof SecurityGroupRulesCmd) { return execute((SecurityGroupRulesCmd) cmd); } else if (cmd instanceof 
DeleteStoragePoolCommand) { return execute((DeleteStoragePoolCommand) cmd); } else if (cmd instanceof FenceCommand) { return execute((FenceCommand) cmd); } else if (cmd instanceof StartCommand) { return execute((StartCommand) cmd); } else if (cmd instanceof PlugNicCommand) { return execute((PlugNicCommand) cmd); } else if (cmd instanceof UnPlugNicCommand) { return execute((UnPlugNicCommand) cmd); } else if (cmd instanceof SetupGuestNetworkCommand) { return execute((SetupGuestNetworkCommand) cmd); } else if (cmd instanceof SetNetworkACLCommand) { return execute((SetNetworkACLCommand) cmd); } else if (cmd instanceof SetSourceNatCommand) { return execute((SetSourceNatCommand) cmd); } else if (cmd instanceof IpAssocVpcCommand) { return execute((IpAssocVpcCommand) cmd); } else if (cmd instanceof IpAssocCommand) { return execute((IpAssocCommand) cmd); } else if (cmd instanceof NetworkElementCommand) { return _virtRouterResource.executeRequest(cmd); } else if (cmd instanceof CheckSshCommand) { return execute((CheckSshCommand) cmd); } else if (cmd instanceof NetworkUsageCommand) { return execute((NetworkUsageCommand) cmd); } else if (cmd instanceof NetworkRulesSystemVmCommand) { return execute((NetworkRulesSystemVmCommand) cmd); } else if (cmd instanceof CleanupNetworkRulesCmd) { return execute((CleanupNetworkRulesCmd) cmd); } else if (cmd instanceof CopyVolumeCommand) { return execute((CopyVolumeCommand) cmd); } else if (cmd instanceof ResizeVolumeCommand) { return execute((ResizeVolumeCommand) cmd); } else if (cmd instanceof CheckNetworkCommand) { return execute((CheckNetworkCommand) cmd); } else { s_logger.warn("Unsupported command "); return Answer.createUnsupportedCommandAnswer(cmd); } } catch (final IllegalArgumentException e) { return new Answer(cmd, false, e.getMessage()); } } private CheckNetworkAnswer execute(CheckNetworkCommand cmd) { List<PhysicalNetworkSetupInfo> phyNics = cmd .getPhysicalNetworkInfoList(); String errMsg = null; for (PhysicalNetworkSetupInfo nic : phyNics) { if (!checkNetwork(nic.getGuestNetworkName())) { errMsg = "Can not find network: " + nic.getGuestNetworkName(); break; } else if (!checkNetwork(nic.getPrivateNetworkName())) { errMsg = "Can not find network: " + nic.getPrivateNetworkName(); break; } else if (!checkNetwork(nic.getPublicNetworkName())) { errMsg = "Can not find network: " + nic.getPublicNetworkName(); break; } } if (errMsg != null) { return new CheckNetworkAnswer(cmd, false, errMsg); } else { return new CheckNetworkAnswer(cmd, true, null); } } private CopyVolumeAnswer execute(CopyVolumeCommand cmd) { boolean copyToSecondary = cmd.toSecondaryStorage(); String volumePath = cmd.getVolumePath(); StorageFilerTO pool = cmd.getPool(); String secondaryStorageUrl = cmd.getSecondaryStorageURL(); KVMStoragePool secondaryStoragePool = null; try { KVMStoragePool primaryPool = _storagePoolMgr.getStoragePool( pool.getType(), pool.getUuid()); String volumeName = UUID.randomUUID().toString(); if (copyToSecondary) { String destVolumeName = volumeName + ".qcow2"; KVMPhysicalDisk volume = primaryPool.getPhysicalDisk(cmd .getVolumePath()); String volumeDestPath = "/volumes/" + cmd.getVolumeId() + File.separator; secondaryStoragePool = _storagePoolMgr.getStoragePoolByURI( secondaryStorageUrl); secondaryStoragePool.createFolder(volumeDestPath); secondaryStoragePool.delete(); secondaryStoragePool = _storagePoolMgr.getStoragePoolByURI( secondaryStorageUrl + volumeDestPath); _storagePoolMgr.copyPhysicalDisk(volume, destVolumeName,secondaryStoragePool); return new 
CopyVolumeAnswer(cmd, true, null, null, volumeName); } else { volumePath = "/volumes/" + cmd.getVolumeId() + File.separator; secondaryStoragePool = _storagePoolMgr.getStoragePoolByURI( secondaryStorageUrl + volumePath); KVMPhysicalDisk volume = secondaryStoragePool .getPhysicalDisk(cmd.getVolumePath() + ".qcow2"); _storagePoolMgr.copyPhysicalDisk(volume, volumeName, primaryPool); return new CopyVolumeAnswer(cmd, true, null, null, volumeName); } } catch (CloudRuntimeException e) { return new CopyVolumeAnswer(cmd, false, e.toString(), null, null); } finally { if (secondaryStoragePool != null) { secondaryStoragePool.delete(); } } } protected Answer execute(DeleteStoragePoolCommand cmd) { try { _storagePoolMgr.deleteStoragePool(cmd.getPool().getType(), cmd.getPool().getUuid()); return new Answer(cmd); } catch (CloudRuntimeException e) { return new Answer(cmd, false, e.toString()); } } protected FenceAnswer execute(FenceCommand cmd) { ExecutorService executors = Executors.newSingleThreadExecutor(); List<NfsStoragePool> pools = _monitor.getStoragePools(); KVMHAChecker ha = new KVMHAChecker(pools, cmd.getHostIp()); Future<Boolean> future = executors.submit(ha); try { Boolean result = future.get(); if (result) { return new FenceAnswer(cmd, false, "Heart is still beating..."); } else { return new FenceAnswer(cmd); } } catch (InterruptedException e) { s_logger.warn("Unable to fence", e); return new FenceAnswer(cmd, false, e.getMessage()); } catch (ExecutionException e) { s_logger.warn("Unable to fence", e); return new FenceAnswer(cmd, false, e.getMessage()); } } protected Storage.StorageResourceType getStorageResourceType() { return Storage.StorageResourceType.STORAGE_POOL; } protected Answer execute(CreateCommand cmd) { StorageFilerTO pool = cmd.getPool(); DiskProfile dskch = cmd.getDiskCharacteristics(); KVMPhysicalDisk BaseVol = null; KVMStoragePool primaryPool = null; KVMPhysicalDisk vol = null; long disksize; try { primaryPool = _storagePoolMgr.getStoragePool(pool.getType(), pool.getUuid()); disksize = dskch.getSize(); if (cmd.getTemplateUrl() != null) { if(primaryPool.getType() == StoragePoolType.CLVM) { vol = templateToPrimaryDownload(cmd.getTemplateUrl(),primaryPool); } else { BaseVol = primaryPool.getPhysicalDisk(cmd.getTemplateUrl()); vol = _storagePoolMgr.createDiskFromTemplate(BaseVol, UUID .randomUUID().toString(), primaryPool); } if (vol == null) { return new Answer(cmd, false, " Can't create storage volume on storage pool"); } } else { vol = primaryPool.createPhysicalDisk(UUID.randomUUID() .toString(), dskch.getSize()); } VolumeTO volume = new VolumeTO(cmd.getVolumeId(), dskch.getType(), pool.getType(), pool.getUuid(), pool.getPath(), vol.getName(), vol.getName(), disksize, null); return new CreateAnswer(cmd, volume); } catch (CloudRuntimeException e) { s_logger.debug("Failed to create volume: " + e.toString()); return new CreateAnswer(cmd, e); } } // this is much like PrimaryStorageDownloadCommand, but keeping it separate protected KVMPhysicalDisk templateToPrimaryDownload(String templateUrl, KVMStoragePool primaryPool) { int index = templateUrl.lastIndexOf("/"); String mountpoint = templateUrl.substring(0, index); String templateName = null; if (index < templateUrl.length() - 1) { templateName = templateUrl.substring(index + 1); } KVMPhysicalDisk templateVol = null; KVMStoragePool secondaryPool = null; try { secondaryPool = _storagePoolMgr.getStoragePoolByURI(mountpoint); /* Get template vol */ if (templateName == null) { secondaryPool.refresh(); List<KVMPhysicalDisk> disks = 
secondaryPool.listPhysicalDisks(); if (disks == null || disks.isEmpty()) { s_logger.error("Failed to get volumes from pool: " + secondaryPool.getUuid()); return null; } for (KVMPhysicalDisk disk : disks) { if (disk.getName().endsWith("qcow2")) { templateVol = disk; break; } } if (templateVol == null) { s_logger.error("Failed to get template from pool: " + secondaryPool.getUuid()); return null; } } else { templateVol = secondaryPool.getPhysicalDisk(templateName); } /* Copy volume to primary storage */ KVMPhysicalDisk primaryVol = _storagePoolMgr.copyPhysicalDisk(templateVol, UUID.randomUUID().toString(), primaryPool); return primaryVol; } catch (CloudRuntimeException e) { s_logger.error("Failed to download template to primary storage",e); return null; } finally { if (secondaryPool != null) { secondaryPool.delete(); } } } private String getResizeScriptType (KVMStoragePool pool, KVMPhysicalDisk vol) { StoragePoolType poolType = pool.getType(); PhysicalDiskFormat volFormat = vol.getFormat(); if(pool.getType() == StoragePoolType.CLVM && volFormat == KVMPhysicalDisk.PhysicalDiskFormat.RAW) { return "CLVM"; } else if ((poolType == StoragePoolType.NetworkFilesystem || poolType == StoragePoolType.SharedMountPoint || poolType == StoragePoolType.Filesystem) && volFormat == KVMPhysicalDisk.PhysicalDiskFormat.QCOW2 ) { return "QCOW2"; } return null; } /* uses a local script now, eventually support for virStorageVolResize() will maybe work on qcow2 and lvm and we can do this in libvirt calls */ public Answer execute(ResizeVolumeCommand cmd) { String volid = cmd.getPath(); long newSize = cmd.getNewSize(); long currentSize = cmd.getCurrentSize(); String vmInstanceName = cmd.getInstanceName(); boolean shrinkOk = cmd.getShrinkOk(); StorageFilerTO spool = cmd.getPool(); try { KVMStoragePool pool = _storagePoolMgr.getStoragePool(spool.getType(), spool.getUuid()); KVMPhysicalDisk vol = pool.getPhysicalDisk(volid); String path = vol.getPath(); String type = getResizeScriptType(pool, vol); if (type == null) { return new ResizeVolumeAnswer(cmd, false, "Unsupported volume format: pool type '" + pool.getType() + "' and volume format '" + vol.getFormat() + "'"); } else if (type.equals("QCOW2") && shrinkOk) { return new ResizeVolumeAnswer(cmd, false, "Unable to shrink volumes of type " + type); } s_logger.debug("got to the stage where we execute the volume resize, params:" + path + "," + currentSize + "," + newSize + "," + type + "," + vmInstanceName + "," + shrinkOk); final Script resizecmd = new Script(_resizeVolumePath, _cmdsTimeout, s_logger); resizecmd.add("-s",String.valueOf(newSize)); resizecmd.add("-c",String.valueOf(currentSize)); resizecmd.add("-p",path); resizecmd.add("-t",type); resizecmd.add("-r",String.valueOf(shrinkOk)); resizecmd.add("-v",vmInstanceName); String result = resizecmd.execute(); if (result == null) { /* fetch new size as seen from libvirt, don't want to assume anything */ pool = _storagePoolMgr.getStoragePool(spool.getType(), spool.getUuid()); long finalSize = pool.getPhysicalDisk(volid).getVirtualSize(); s_logger.debug("after resize, size reports as " + finalSize + ", requested " + newSize); return new ResizeVolumeAnswer(cmd, true, "success", finalSize); } return new ResizeVolumeAnswer(cmd, false, result); } catch (CloudRuntimeException e) { String error = "failed to resize volume: " + e; s_logger.debug(error); return new ResizeVolumeAnswer(cmd, false, error); } } public Answer execute(DestroyCommand cmd) { VolumeTO vol = cmd.getVolume(); try { KVMStoragePool pool = 
_storagePoolMgr.getStoragePool( vol.getPoolType(), vol.getPoolUuid()); pool.deletePhysicalDisk(vol.getPath()); String vmName = cmd.getVmName(); String poolPath = pool.getLocalPath(); /* if vol is a root disk for a system vm, try to remove accompanying patch disk as well this is a bit tricky since the patchdisk is only a LibvirtComputingResource construct and not tracked anywhere in cloudstack */ if (vol.getType() == Volume.Type.ROOT && vmName.matches("^[rsv]-\\d+-.+$")) { File patchVbd = new File(poolPath + File.separator + vmName + "-patchdisk"); if(patchVbd.exists()){ try { _storagePoolMgr.deleteVbdByPath(vol.getPoolType(),patchVbd.getAbsolutePath()); } catch(CloudRuntimeException e) { s_logger.warn("unable to destroy patch disk '" + patchVbd.getAbsolutePath() + "' while removing root disk for " + vmName + " : " + e); } } else { s_logger.debug("file '" +patchVbd.getAbsolutePath()+ "' not found"); } } return new Answer(cmd, true, "Success"); } catch (CloudRuntimeException e) { s_logger.debug("Failed to delete volume: " + e.toString()); return new Answer(cmd, false, e.toString()); } } private String getVlanIdFromBridge(String brName) { String pif= matchPifFileInDirectory(brName); String[] pifparts = pif.split("\\."); if(pifparts.length == 2) { return pifparts[1]; } else { s_logger.debug("failed to get vlan id from bridge " + brName + "attached to physical interface" + pif); return ""; } } private void VifHotPlug(Connect conn, String vmName, String vlanId, String macAddr) throws InternalErrorException, LibvirtException { NicTO nicTO = new NicTO(); nicTO.setMac(macAddr); nicTO.setType(TrafficType.Public); if (vlanId == null) { nicTO.setBroadcastType(BroadcastDomainType.Native); } else { nicTO.setBroadcastType(BroadcastDomainType.Vlan); nicTO.setBroadcastUri(BroadcastDomainType.Vlan.toUri(vlanId)); } Domain vm = getDomain(conn, vmName); vm.attachDevice(_vifDriver.plug(nicTO, "Other PV (32-bit)").toString()); } private PlugNicAnswer execute(PlugNicCommand cmd) { Connect conn; NicTO nic = cmd.getNic(); String vmName = cmd.getVmName(); try { conn = LibvirtConnection.getConnection(); Domain vm = getDomain(conn, vmName); List<InterfaceDef> pluggedNics = getInterfaces(conn, vmName); Integer nicnum = 0; for (InterfaceDef pluggedNic : pluggedNics) { if (pluggedNic.getMacAddress().equalsIgnoreCase(nic.getMac())) { s_logger.debug("found existing nic for mac "+ pluggedNic.getMacAddress() + " at index "+nicnum); return new PlugNicAnswer(cmd, true, "success"); } nicnum++; } vm.attachDevice(_vifDriver.plug(nic, "Other PV (32-bit)").toString()); return new PlugNicAnswer(cmd, true, "success"); } catch (Exception e) { String msg = " Plug Nic failed due to " + e.toString(); s_logger.warn(msg, e); return new PlugNicAnswer(cmd, false, msg); } } private UnPlugNicAnswer execute(UnPlugNicCommand cmd) { Connect conn; NicTO nic = cmd.getNic(); String vmName = cmd.getInstanceName(); try { conn = LibvirtConnection.getConnection(); Domain vm = getDomain(conn, vmName); List<InterfaceDef> pluggedNics = getInterfaces(conn, vmName); for (InterfaceDef pluggedNic : pluggedNics) { if (pluggedNic.getMacAddress().equalsIgnoreCase(nic.getMac())) { vm.detachDevice(pluggedNic.toString()); return new UnPlugNicAnswer(cmd, true, "success"); } } return new UnPlugNicAnswer(cmd, true, "success"); } catch (Exception e) { String msg = " Unplug Nic failed due to " + e.toString(); s_logger.warn(msg, e); return new UnPlugNicAnswer(cmd, false, msg); } } private SetupGuestNetworkAnswer execute(SetupGuestNetworkCommand cmd) { Connect conn; NicTO 
nic = cmd.getNic(); String routerIP = cmd.getAccessDetail(NetworkElementCommand.ROUTER_IP); String routerGIP = cmd.getAccessDetail(NetworkElementCommand.ROUTER_GUEST_IP); String routerName = cmd.getAccessDetail(NetworkElementCommand.ROUTER_NAME); String gateway = cmd.getAccessDetail(NetworkElementCommand.GUEST_NETWORK_GATEWAY); String cidr = Long.toString(NetUtils.getCidrSize(nic.getNetmask()));; String domainName = cmd.getNetworkDomain(); String dns = cmd.getDefaultDns1(); if (dns == null || dns.isEmpty()) { dns = cmd.getDefaultDns2(); } else { String dns2= cmd.getDefaultDns2(); if ( dns2 != null && !dns2.isEmpty()) { dns += "," + dns2; } } try { conn = LibvirtConnection.getConnection(); Domain vm = getDomain(conn, routerName); List<InterfaceDef> pluggedNics = getInterfaces(conn, routerName); InterfaceDef routerNic = null; for (InterfaceDef pluggedNic : pluggedNics) { if (pluggedNic.getMacAddress().equalsIgnoreCase(nic.getMac())) { routerNic = pluggedNic; break; } } if ( routerNic == null ) { return new SetupGuestNetworkAnswer(cmd, false, "Can not find nic with mac " + nic.getMac() + " for VM " + routerName); } String args = "vpc_guestnw.sh " + routerIP + " -C"; String dev = "eth" + nic.getDeviceId(); String netmask = NetUtils.getSubNet(routerGIP, nic.getNetmask()); String result = _virtRouterResource.assignGuestNetwork(dev, routerIP, routerGIP, gateway, cidr, netmask, dns, domainName ); if (result != null) { return new SetupGuestNetworkAnswer(cmd, false, "Creating guest network failed due to " + result); } return new SetupGuestNetworkAnswer(cmd, true, "success"); } catch (Exception e) { String msg = "Creating guest network failed due to " + e.toString(); s_logger.warn(msg, e); return new SetupGuestNetworkAnswer(cmd, false, msg); } } private SetNetworkACLAnswer execute(SetNetworkACLCommand cmd) { String[] results = new String[cmd.getRules().length]; String callResult; Connect conn; String routerName = cmd.getAccessDetail(NetworkElementCommand.ROUTER_NAME); String routerIp = cmd.getAccessDetail(NetworkElementCommand.ROUTER_IP); try { conn = LibvirtConnection.getConnection(); Domain vm = getDomain(conn, routerName); String [][] rules = cmd.generateFwRules(); String[] aclRules = rules[0]; NicTO nic = cmd.getNic(); String dev = "eth" + nic.getDeviceId(); String netmask = Long.toString(NetUtils.getCidrSize(nic.getNetmask())); StringBuilder sb = new StringBuilder(); for (int i = 0; i < aclRules.length; i++) { sb.append(aclRules[i]).append(','); } String rule = sb.toString(); String result = _virtRouterResource.assignNetworkACL(routerIp, dev, nic.getIp(), netmask, rule); if (result != null) { for (int i=0; i < results.length; i++) { results[i] = "Failed"; } return new SetNetworkACLAnswer(cmd, false, results); } return new SetNetworkACLAnswer(cmd, true, results); } catch (Exception e) { String msg = "SetNetworkACL failed due to " + e.toString(); s_logger.error(msg, e); return new SetNetworkACLAnswer(cmd, false, results); } } protected SetSourceNatAnswer execute(SetSourceNatCommand cmd) { Connect conn; String routerName = cmd.getAccessDetail(NetworkElementCommand.ROUTER_NAME); String routerIP = cmd.getAccessDetail(NetworkElementCommand.ROUTER_IP); IpAddressTO pubIP = cmd.getIpAddress(); try { conn = LibvirtConnection.getConnection(); Domain vm = getDomain(conn, routerName); Integer devNum = 0; String pubVlan = pubIP.getVlanId(); List<InterfaceDef> pluggedNics = getInterfaces(conn, routerName); for (InterfaceDef pluggedNic : pluggedNics) { String pluggedVlanBr = pluggedNic.getBrName(); String 
pluggedVlanId = getVlanIdFromBridge(pluggedVlanBr); if (pubVlan.equalsIgnoreCase(Vlan.UNTAGGED) && pluggedVlanBr.equalsIgnoreCase(_publicBridgeName)) { break; } else if (pluggedVlanBr.equalsIgnoreCase(_linkLocalBridgeName)){ /*skip over, no physical bridge device exists*/ } else if (pluggedVlanId == null) { /*this should only be true in the case of link local bridge*/ return new SetSourceNatAnswer(cmd, false, "unable to find the vlan id for bridge "+pluggedVlanBr+ " when attempting to set up" + pubVlan + " on router " + routerName); } else if (pluggedVlanId.equals(pubVlan)) { break; } devNum++; } String dev = "eth" + devNum; String result = _virtRouterResource.assignSourceNat(routerIP, pubIP.getPublicIp(), dev); if (result != null) { return new SetSourceNatAnswer(cmd, false, "KVM plugin \"vpc_snat\" failed:"+result); } return new SetSourceNatAnswer(cmd, true, "success"); } catch (Exception e) { String msg = "Ip SNAT failure due to " + e.toString(); s_logger.error(msg, e); return new SetSourceNatAnswer(cmd, false, msg); } } protected IpAssocAnswer execute(IpAssocVpcCommand cmd) { Connect conn; String[] results = new String[cmd.getIpAddresses().length]; int i = 0; String routerName = cmd.getAccessDetail(NetworkElementCommand.ROUTER_NAME); String routerIP = cmd.getAccessDetail(NetworkElementCommand.ROUTER_IP); try { conn = LibvirtConnection.getConnection(); IpAddressTO[] ips = cmd.getIpAddresses(); Domain vm = getDomain(conn, routerName); Integer devNum = 0; Map<String, Integer> vlanToNicNum = new HashMap<String, Integer>(); List<InterfaceDef> pluggedNics = getInterfaces(conn, routerName); for (InterfaceDef pluggedNic : pluggedNics) { String pluggedVlan = pluggedNic.getBrName(); if (pluggedVlan.equalsIgnoreCase(_linkLocalBridgeName)) { vlanToNicNum.put("LinkLocal",devNum); } else if (pluggedVlan.equalsIgnoreCase(_publicBridgeName) || pluggedVlan.equalsIgnoreCase(_privBridgeName) || pluggedVlan.equalsIgnoreCase(_guestBridgeName)) { vlanToNicNum.put(Vlan.UNTAGGED,devNum); } else { vlanToNicNum.put(getVlanIdFromBridge(pluggedVlan),devNum); } devNum++; } for (IpAddressTO ip : ips) { String nicName = "eth" + vlanToNicNum.get(ip.getVlanId()); String netmask = Long.toString(NetUtils.getCidrSize(ip.getVlanNetmask())); String subnet = NetUtils.getSubNet(ip.getPublicIp(), ip.getVlanNetmask()); _virtRouterResource.assignVpcIpToRouter(routerIP, ip.isAdd(), ip.getPublicIp(), nicName, ip.getVlanGateway(), netmask, subnet); results[i++] = ip.getPublicIp() + " - success"; } } catch (Exception e) { s_logger.error("Ip Assoc failure on applying one ip due to exception: ", e); results[i++] = IpAssocAnswer.errorResult; } return new IpAssocAnswer(cmd, results); } public Answer execute(IpAssocCommand cmd) { String routerName = cmd .getAccessDetail(NetworkElementCommand.ROUTER_NAME); String routerIp = cmd.getAccessDetail(NetworkElementCommand.ROUTER_IP); String[] results = new String[cmd.getIpAddresses().length]; Connect conn; try { conn = LibvirtConnection.getConnection(); List<InterfaceDef> nics = getInterfaces(conn, routerName); Map<String, Integer> vlanAllocatedToVM = new HashMap<String, Integer>(); Integer nicPos = 0; for (InterfaceDef nic : nics) { if (nic.getBrName().equalsIgnoreCase(_linkLocalBridgeName)) { vlanAllocatedToVM.put("LinkLocal", nicPos); } else { if (nic.getBrName().equalsIgnoreCase(_publicBridgeName) || nic.getBrName().equalsIgnoreCase(_privBridgeName) || nic.getBrName().equalsIgnoreCase(_guestBridgeName)) { vlanAllocatedToVM.put(Vlan.UNTAGGED, nicPos); } else { String vlanId = 
getVlanIdFromBridge(nic.getBrName()); vlanAllocatedToVM.put(vlanId, nicPos); } } nicPos++; } IpAddressTO[] ips = cmd.getIpAddresses(); int i = 0; String result = null; int nicNum = 0; for (IpAddressTO ip : ips) { if (!vlanAllocatedToVM.containsKey(ip.getVlanId())) { /* plug a vif into router */ VifHotPlug(conn, routerName, ip.getVlanId(), ip.getVifMacAddress()); vlanAllocatedToVM.put(ip.getVlanId(), nicPos++); } nicNum = vlanAllocatedToVM.get(ip.getVlanId()); networkUsage(routerIp, "addVif", "eth" + nicNum); result = _virtRouterResource.assignPublicIpAddress(routerName, routerIp, ip.getPublicIp(), ip.isAdd(), ip.isFirstIP(), ip.isSourceNat(), ip.getVlanId(), ip.getVlanGateway(), ip.getVlanNetmask(), ip.getVifMacAddress(), nicNum); if (result != null) { results[i++] = IpAssocAnswer.errorResult; } else { results[i++] = ip.getPublicIp() + " - success"; ; } } return new IpAssocAnswer(cmd, results); } catch (LibvirtException e) { return new IpAssocAnswer(cmd, results); } catch (InternalErrorException e) { return new IpAssocAnswer(cmd, results); } } protected ManageSnapshotAnswer execute(final ManageSnapshotCommand cmd) { String snapshotName = cmd.getSnapshotName(); String snapshotPath = cmd.getSnapshotPath(); String vmName = cmd.getVmName(); try { Connect conn = LibvirtConnection.getConnection(); DomainInfo.DomainState state = null; Domain vm = null; if (vmName != null) { try { vm = getDomain(conn, cmd.getVmName()); state = vm.getInfo().state; } catch (LibvirtException e) { } } KVMStoragePool primaryPool = _storagePoolMgr.getStoragePool( cmd.getPool().getType(), cmd.getPool().getUuid()); if (primaryPool.getType() == StoragePoolType.RBD) { s_logger.debug("Snapshots are not supported on RBD volumes"); return new ManageSnapshotAnswer(cmd, false, "Snapshots are not supported on RBD volumes"); } KVMPhysicalDisk disk = primaryPool.getPhysicalDisk(cmd .getVolumePath()); if (state == DomainInfo.DomainState.VIR_DOMAIN_RUNNING && !primaryPool.isExternalSnapshot()) { String vmUuid = vm.getUUIDString(); Object[] args = new Object[] { snapshotName, vmUuid }; String snapshot = SnapshotXML.format(args); s_logger.debug(snapshot); if (cmd.getCommandSwitch().equalsIgnoreCase( ManageSnapshotCommand.CREATE_SNAPSHOT)) { vm.snapshotCreateXML(snapshot); } else { DomainSnapshot snap = vm.snapshotLookupByName(snapshotName); snap.delete(0); } /* * libvirt on RHEL6 doesn't handle resume event emitted from * qemu */ vm = getDomain(conn, cmd.getVmName()); state = vm.getInfo().state; if (state == DomainInfo.DomainState.VIR_DOMAIN_PAUSED) { vm.resume(); } } else { /* VM is not running, create a snapshot by ourself */ final Script command = new Script(_manageSnapshotPath, _cmdsTimeout, s_logger); if (cmd.getCommandSwitch().equalsIgnoreCase( ManageSnapshotCommand.CREATE_SNAPSHOT)) { command.add("-c", disk.getPath()); } else { command.add("-d", snapshotPath); } command.add("-n", snapshotName); String result = command.execute(); if (result != null) { s_logger.debug("Failed to manage snapshot: " + result); return new ManageSnapshotAnswer(cmd, false, "Failed to manage snapshot: " + result); } } return new ManageSnapshotAnswer(cmd, cmd.getSnapshotId(), disk.getPath() + File.separator + snapshotName, true, null); } catch (LibvirtException e) { s_logger.debug("Failed to manage snapshot: " + e.toString()); return new ManageSnapshotAnswer(cmd, false, "Failed to manage snapshot: " + e.toString()); } } protected BackupSnapshotAnswer execute(final BackupSnapshotCommand cmd) { Long dcId = cmd.getDataCenterId(); Long accountId = 
cmd.getAccountId(); Long volumeId = cmd.getVolumeId(); String secondaryStoragePoolUrl = cmd.getSecondaryStorageUrl(); String snapshotName = cmd.getSnapshotName(); String snapshotPath = cmd.getVolumePath(); String snapshotDestPath = null; String snapshotRelPath = null; String vmName = cmd.getVmName(); KVMStoragePool secondaryStoragePool = null; try { Connect conn = LibvirtConnection.getConnection(); secondaryStoragePool = _storagePoolMgr.getStoragePoolByURI( secondaryStoragePoolUrl); String ssPmountPath = secondaryStoragePool.getLocalPath(); snapshotRelPath = File.separator + "snapshots" + File.separator + dcId + File.separator + accountId + File.separator + volumeId; snapshotDestPath = ssPmountPath + File.separator + "snapshots" + File.separator + dcId + File.separator + accountId + File.separator + volumeId; KVMStoragePool primaryPool = _storagePoolMgr.getStoragePool( cmd.getPool().getType(), cmd.getPrimaryStoragePoolNameLabel()); KVMPhysicalDisk snapshotDisk = primaryPool.getPhysicalDisk(cmd .getVolumePath()); Script command = new Script(_manageSnapshotPath, _cmdsTimeout, s_logger); command.add("-b", snapshotDisk.getPath()); command.add("-n", snapshotName); command.add("-p", snapshotDestPath); command.add("-t", snapshotName); String result = command.execute(); if (result != null) { s_logger.debug("Failed to backup snaptshot: " + result); return new BackupSnapshotAnswer(cmd, false, result, null, true); } /* Delete the snapshot on primary */ DomainInfo.DomainState state = null; Domain vm = null; if (vmName != null) { try { vm = getDomain(conn, cmd.getVmName()); state = vm.getInfo().state; } catch (LibvirtException e) { } } KVMStoragePool primaryStorage = _storagePoolMgr.getStoragePool( cmd.getPool().getType(), cmd.getPool().getUuid()); if (state == DomainInfo.DomainState.VIR_DOMAIN_RUNNING && !primaryStorage.isExternalSnapshot()) { String vmUuid = vm.getUUIDString(); Object[] args = new Object[] { snapshotName, vmUuid }; String snapshot = SnapshotXML.format(args); s_logger.debug(snapshot); DomainSnapshot snap = vm.snapshotLookupByName(snapshotName); snap.delete(0); /* * libvirt on RHEL6 doesn't handle resume event emitted from * qemu */ vm = getDomain(conn, cmd.getVmName()); state = vm.getInfo().state; if (state == DomainInfo.DomainState.VIR_DOMAIN_PAUSED) { vm.resume(); } } else { command = new Script(_manageSnapshotPath, _cmdsTimeout, s_logger); command.add("-d", snapshotDisk.getPath()); command.add("-n", snapshotName); result = command.execute(); if (result != null) { s_logger.debug("Failed to backup snapshot: " + result); return new BackupSnapshotAnswer(cmd, false, "Failed to backup snapshot: " + result, null, true); } } } catch (LibvirtException e) { return new BackupSnapshotAnswer(cmd, false, e.toString(), null, true); } catch (CloudRuntimeException e) { return new BackupSnapshotAnswer(cmd, false, e.toString(), null, true); } finally { if (secondaryStoragePool != null) { secondaryStoragePool.delete(); } } return new BackupSnapshotAnswer(cmd, true, null, snapshotRelPath + File.separator + snapshotName, true); } protected DeleteSnapshotBackupAnswer execute( final DeleteSnapshotBackupCommand cmd) { Long dcId = cmd.getDataCenterId(); Long accountId = cmd.getAccountId(); Long volumeId = cmd.getVolumeId(); KVMStoragePool secondaryStoragePool = null; try { secondaryStoragePool = _storagePoolMgr.getStoragePoolByURI(cmd .getSecondaryStorageUrl()); String ssPmountPath = secondaryStoragePool.getLocalPath(); String snapshotDestPath = ssPmountPath + File.separator + "snapshots" + File.separator + 
dcId + File.separator + accountId + File.separator + volumeId; final Script command = new Script(_manageSnapshotPath, _cmdsTimeout, s_logger); command.add("-d", snapshotDestPath); command.add("-n", cmd.getSnapshotName()); command.execute(); } catch (CloudRuntimeException e) { return new DeleteSnapshotBackupAnswer(cmd, false, e.toString()); } finally { if (secondaryStoragePool != null) { secondaryStoragePool.delete(); } } return new DeleteSnapshotBackupAnswer(cmd, true, null); } protected Answer execute(DeleteSnapshotsDirCommand cmd) { Long dcId = cmd.getDcId(); Long accountId = cmd.getAccountId(); Long volumeId = cmd.getVolumeId(); KVMStoragePool secondaryStoragePool = null; try { secondaryStoragePool = _storagePoolMgr.getStoragePoolByURI(cmd .getSecondaryStorageUrl()); String ssPmountPath = secondaryStoragePool.getLocalPath(); String snapshotDestPath = ssPmountPath + File.separator + "snapshots" + File.separator + dcId + File.separator + accountId + File.separator + volumeId; final Script command = new Script(_manageSnapshotPath, _cmdsTimeout, s_logger); command.add("-d", snapshotDestPath); command.add("-f"); command.execute(); } catch (CloudRuntimeException e) { return new Answer(cmd, false, e.toString()); } finally { if (secondaryStoragePool != null) { secondaryStoragePool.delete(); } } return new Answer(cmd, true, null); } protected CreateVolumeFromSnapshotAnswer execute( final CreateVolumeFromSnapshotCommand cmd) { try { String snapshotPath = cmd.getSnapshotUuid(); int index = snapshotPath.lastIndexOf("/"); snapshotPath = snapshotPath.substring(0, index); KVMStoragePool secondaryPool = _storagePoolMgr.getStoragePoolByURI( cmd.getSecondaryStorageUrl() + snapshotPath); KVMPhysicalDisk snapshot = secondaryPool.getPhysicalDisk(cmd .getSnapshotName()); String primaryUuid = cmd.getPrimaryStoragePoolNameLabel(); KVMStoragePool primaryPool = _storagePoolMgr .getStoragePool(cmd.getPool().getType(), primaryUuid); String volUuid = UUID.randomUUID().toString(); KVMPhysicalDisk disk = _storagePoolMgr.copyPhysicalDisk(snapshot, volUuid, primaryPool); return new CreateVolumeFromSnapshotAnswer(cmd, true, "", disk.getName()); } catch (CloudRuntimeException e) { return new CreateVolumeFromSnapshotAnswer(cmd, false, e.toString(), null); } } protected Answer execute(final UpgradeSnapshotCommand cmd) { return new Answer(cmd, true, "success"); } protected CreatePrivateTemplateAnswer execute( final CreatePrivateTemplateFromSnapshotCommand cmd) { String templateFolder = cmd.getAccountId() + File.separator + cmd.getNewTemplateId(); String templateInstallFolder = "template/tmpl/" + templateFolder; String tmplName = UUID.randomUUID().toString(); String tmplFileName = tmplName + ".qcow2"; KVMStoragePool secondaryPool = null; KVMStoragePool snapshotPool = null; try { String snapshotPath = cmd.getSnapshotUuid(); int index = snapshotPath.lastIndexOf("/"); snapshotPath = snapshotPath.substring(0, index); snapshotPool = _storagePoolMgr.getStoragePoolByURI(cmd .getSecondaryStorageUrl() + snapshotPath); KVMPhysicalDisk snapshot = snapshotPool.getPhysicalDisk(cmd .getSnapshotName()); secondaryPool = _storagePoolMgr.getStoragePoolByURI( cmd.getSecondaryStorageUrl()); String templatePath = secondaryPool.getLocalPath() + File.separator + templateInstallFolder; _storage.mkdirs(templatePath); String tmplPath = templateInstallFolder + File.separator + tmplFileName; Script command = new Script(_createTmplPath, _cmdsTimeout, s_logger); command.add("-t", templatePath); command.add("-n", tmplFileName); command.add("-f", 
snapshot.getPath()); command.execute(); Map<String, Object> params = new HashMap<String, Object>(); params.put(StorageLayer.InstanceConfigKey, _storage); Processor qcow2Processor = new QCOW2Processor(); qcow2Processor.configure("QCOW2 Processor", params); FormatInfo info = qcow2Processor.process(templatePath, null, tmplName); TemplateLocation loc = new TemplateLocation(_storage, templatePath); loc.create(1, true, tmplName); loc.addFormat(info); loc.save(); return new CreatePrivateTemplateAnswer(cmd, true, "", tmplPath, info.virtualSize, info.size, tmplName, info.format); } catch (ConfigurationException e) { return new CreatePrivateTemplateAnswer(cmd, false, e.getMessage()); } catch (InternalErrorException e) { return new CreatePrivateTemplateAnswer(cmd, false, e.getMessage()); } catch (IOException e) { return new CreatePrivateTemplateAnswer(cmd, false, e.getMessage()); } catch (CloudRuntimeException e) { return new CreatePrivateTemplateAnswer(cmd, false, e.getMessage()); } finally { if (secondaryPool != null) { secondaryPool.delete(); } if (snapshotPool != null) { snapshotPool.delete(); } } } protected GetStorageStatsAnswer execute(final GetStorageStatsCommand cmd) { try { KVMStoragePool sp = _storagePoolMgr.getStoragePool( cmd.getPooltype(), cmd.getStorageId()); return new GetStorageStatsAnswer(cmd, sp.getCapacity(), sp.getUsed()); } catch (CloudRuntimeException e) { return new GetStorageStatsAnswer(cmd, e.toString()); } } protected CreatePrivateTemplateAnswer execute( CreatePrivateTemplateFromVolumeCommand cmd) { String secondaryStorageURL = cmd.getSecondaryStorageUrl(); KVMStoragePool secondaryStorage = null; try { Connect conn = LibvirtConnection.getConnection(); String templateFolder = cmd.getAccountId() + File.separator + cmd.getTemplateId() + File.separator; String templateInstallFolder = "/template/tmpl/" + templateFolder; secondaryStorage = _storagePoolMgr.getStoragePoolByURI( secondaryStorageURL); KVMStoragePool primary = _storagePoolMgr.getStoragePool( cmd.getPool().getType(), cmd.getPrimaryStoragePoolNameLabel()); KVMPhysicalDisk disk = primary.getPhysicalDisk(cmd.getVolumePath()); String tmpltPath = secondaryStorage.getLocalPath() + File.separator + templateInstallFolder; _storage.mkdirs(tmpltPath); if (primary.getType() != StoragePoolType.RBD) { Script command = new Script(_createTmplPath, _cmdsTimeout, s_logger); command.add("-f", disk.getPath()); command.add("-t", tmpltPath); command.add("-n", cmd.getUniqueName() + ".qcow2"); String result = command.execute(); if (result != null) { s_logger.debug("failed to create template: " + result); return new CreatePrivateTemplateAnswer(cmd, false, result); } } else { s_logger.debug("Converting RBD disk " + disk.getPath() + " into template " + cmd.getUniqueName()); Script.runSimpleBashScript("qemu-img convert" + " -f raw -O qcow2 " + KVMPhysicalDisk.RBDStringBuilder(primary.getSourceHost(), primary.getSourcePort(), primary.getAuthUserName(), primary.getAuthSecret(), disk.getPath()) + " " + tmpltPath + "/" + cmd.getUniqueName() + ".qcow2"); File templateProp = new File(tmpltPath + "/template.properties"); if (!templateProp.exists()) { templateProp.createNewFile(); } String templateContent = "filename=" + cmd.getUniqueName() + ".qcow2" + System.getProperty("line.separator"); DateFormat dateFormat = new SimpleDateFormat("MM_dd_yyyy"); Date date = new Date(); templateContent += "snapshot.name=" + dateFormat.format(date) + System.getProperty("line.separator"); FileOutputStream templFo = new FileOutputStream(templateProp); 
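                // write the generated metadata (filename and snapshot.name) into template.properties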
templFo.write(templateContent.getBytes()); templFo.flush(); templFo.close(); } Map<String, Object> params = new HashMap<String, Object>(); params.put(StorageLayer.InstanceConfigKey, _storage); Processor qcow2Processor = new QCOW2Processor(); qcow2Processor.configure("QCOW2 Processor", params); FormatInfo info = qcow2Processor.process(tmpltPath, null, cmd.getUniqueName()); TemplateLocation loc = new TemplateLocation(_storage, tmpltPath); loc.create(1, true, cmd.getUniqueName()); loc.addFormat(info); loc.save(); return new CreatePrivateTemplateAnswer(cmd, true, null, templateInstallFolder + cmd.getUniqueName() + ".qcow2", info.virtualSize, info.size, cmd.getUniqueName(), ImageFormat.QCOW2); } catch (LibvirtException e) { s_logger.debug("Failed to get secondary storage pool: " + e.toString()); return new CreatePrivateTemplateAnswer(cmd, false, e.toString()); } catch (InternalErrorException e) { return new CreatePrivateTemplateAnswer(cmd, false, e.toString()); } catch (IOException e) { return new CreatePrivateTemplateAnswer(cmd, false, e.toString()); } catch (ConfigurationException e) { return new CreatePrivateTemplateAnswer(cmd, false, e.toString()); } catch (CloudRuntimeException e) { return new CreatePrivateTemplateAnswer(cmd, false, e.toString()); } finally { if (secondaryStorage != null) { secondaryStorage.delete(); } } } protected PrimaryStorageDownloadAnswer execute( final PrimaryStorageDownloadCommand cmd) { String tmplturl = cmd.getUrl(); int index = tmplturl.lastIndexOf("/"); String mountpoint = tmplturl.substring(0, index); String tmpltname = null; if (index < tmplturl.length() - 1) { tmpltname = tmplturl.substring(index + 1); } KVMPhysicalDisk tmplVol = null; KVMStoragePool secondaryPool = null; try { secondaryPool = _storagePoolMgr.getStoragePoolByURI(mountpoint); /* Get template vol */ if (tmpltname == null) { secondaryPool.refresh(); List<KVMPhysicalDisk> disks = secondaryPool.listPhysicalDisks(); if (disks == null || disks.isEmpty()) { return new PrimaryStorageDownloadAnswer( "Failed to get volumes from pool: " + secondaryPool.getUuid()); } for (KVMPhysicalDisk disk : disks) { if (disk.getName().endsWith("qcow2")) { tmplVol = disk; break; } } if (tmplVol == null) { return new PrimaryStorageDownloadAnswer( "Failed to get template from pool: " + secondaryPool.getUuid()); } } else { tmplVol = secondaryPool.getPhysicalDisk(tmpltname); } /* Copy volume to primary storage */ KVMStoragePool primaryPool = _storagePoolMgr.getStoragePool( cmd.getPool().getType(), cmd.getPoolUuid()); KVMPhysicalDisk primaryVol = _storagePoolMgr.copyPhysicalDisk( tmplVol, UUID.randomUUID().toString(), primaryPool); return new PrimaryStorageDownloadAnswer(primaryVol.getName(), primaryVol.getSize()); } catch (CloudRuntimeException e) { return new PrimaryStorageDownloadAnswer(e.toString()); } finally { if (secondaryPool != null) { secondaryPool.delete(); } } } protected Answer execute(CreateStoragePoolCommand cmd) { return new Answer(cmd, true, "success"); } protected Answer execute(ModifyStoragePoolCommand cmd) { String poolType = cmd.getPool().getType().toString(); KVMStoragePool storagepool = _storagePoolMgr.createStoragePool(cmd .getPool().getUuid(), cmd.getPool().getHost(), cmd.getPool().getPort(), cmd.getPool().getPath(), cmd.getPool().getUserInfo(), cmd.getPool().getType()); if (storagepool == null) { return new Answer(cmd, false, " Failed to create storage pool"); } Map<String, TemplateInfo> tInfo = new HashMap<String, TemplateInfo>(); ModifyStoragePoolAnswer answer = new ModifyStoragePoolAnswer(cmd, 
storagepool.getCapacity(), storagepool.getUsed(), tInfo); return answer; } private Answer execute(SecurityGroupRulesCmd cmd) { String vif = null; String brname = null; try { Connect conn = LibvirtConnection.getConnection(); List<InterfaceDef> nics = getInterfaces(conn, cmd.getVmName()); vif = nics.get(0).getDevName(); brname = nics.get(0).getBrName(); } catch (LibvirtException e) { return new SecurityGroupRuleAnswer(cmd, false, e.toString()); } boolean result = add_network_rules(cmd.getVmName(), Long.toString(cmd.getVmId()), cmd.getGuestIp(), cmd.getSignature(), Long.toString(cmd.getSeqNum()), cmd.getGuestMac(), cmd.stringifyRules(), vif, brname); if (!result) { s_logger.warn("Failed to program network rules for vm " + cmd.getVmName()); return new SecurityGroupRuleAnswer(cmd, false, "programming network rules failed"); } else { s_logger.debug("Programmed network rules for vm " + cmd.getVmName() + " guestIp=" + cmd.getGuestIp() + ",ingress numrules=" + cmd.getIngressRuleSet().length + ",egress numrules=" + cmd.getEgressRuleSet().length); return new SecurityGroupRuleAnswer(cmd); } } private Answer execute(CleanupNetworkRulesCmd cmd) { boolean result = cleanup_rules(); return new Answer(cmd, result, ""); } protected GetVncPortAnswer execute(GetVncPortCommand cmd) { try { Connect conn = LibvirtConnection.getConnection(); Integer vncPort = getVncPort(conn, cmd.getName()); return new GetVncPortAnswer(cmd, _privateIp, 5900 + vncPort); } catch (Exception e) { return new GetVncPortAnswer(cmd, e.toString()); } } protected Answer execute(final CheckConsoleProxyLoadCommand cmd) { return executeProxyLoadScan(cmd, cmd.getProxyVmId(), cmd.getProxyVmName(), cmd.getProxyManagementIp(), cmd.getProxyCmdPort()); } protected Answer execute(final WatchConsoleProxyLoadCommand cmd) { return executeProxyLoadScan(cmd, cmd.getProxyVmId(), cmd.getProxyVmName(), cmd.getProxyManagementIp(), cmd.getProxyCmdPort()); } protected MaintainAnswer execute(MaintainCommand cmd) { return new MaintainAnswer(cmd); } private Answer executeProxyLoadScan(final Command cmd, final long proxyVmId, final String proxyVmName, final String proxyManagementIp, final int cmdPort) { String result = null; final StringBuffer sb = new StringBuffer(); sb.append("http://").append(proxyManagementIp).append(":" + cmdPort) .append("/cmd/getstatus"); boolean success = true; try { final URL url = new URL(sb.toString()); final URLConnection conn = url.openConnection(); final InputStream is = conn.getInputStream(); final BufferedReader reader = new BufferedReader( new InputStreamReader(is)); final StringBuilder sb2 = new StringBuilder(); String line = null; try { while ((line = reader.readLine()) != null) { sb2.append(line + "\n"); } result = sb2.toString(); } catch (final IOException e) { success = false; } finally { try { is.close(); } catch (final IOException e) { s_logger.warn("Exception when closing , console proxy address : " + proxyManagementIp); success = false; } } } catch (final IOException e) { s_logger.warn("Unable to open console proxy command port url, console proxy address : " + proxyManagementIp); success = false; } return new ConsoleProxyLoadAnswer(cmd, proxyVmId, proxyVmName, success, result); } private Answer execute(AttachIsoCommand cmd) { try { Connect conn = LibvirtConnection.getConnection(); attachOrDetachISO(conn, cmd.getVmName(), cmd.getIsoPath(), cmd.isAttach()); } catch (LibvirtException e) { return new Answer(cmd, false, e.toString()); } catch (URISyntaxException e) { return new Answer(cmd, false, e.toString()); } catch 
(InternalErrorException e) { return new Answer(cmd, false, e.toString()); } return new Answer(cmd); } private AttachVolumeAnswer execute(AttachVolumeCommand cmd) { try { Connect conn = LibvirtConnection.getConnection(); KVMStoragePool primary = _storagePoolMgr.getStoragePool( cmd.getPooltype(), cmd.getPoolUuid()); KVMPhysicalDisk disk = primary.getPhysicalDisk(cmd.getVolumePath()); attachOrDetachDisk(conn, cmd.getAttach(), cmd.getVmName(), disk, cmd.getDeviceId().intValue()); } catch (LibvirtException e) { return new AttachVolumeAnswer(cmd, e.toString()); } catch (InternalErrorException e) { return new AttachVolumeAnswer(cmd, e.toString()); } return new AttachVolumeAnswer(cmd, cmd.getDeviceId()); } private Answer execute(ReadyCommand cmd) { return new ReadyAnswer(cmd); } protected State convertToState(DomainInfo.DomainState ps) { final State state = s_statesTable.get(ps); return state == null ? State.Unknown : state; } protected State getVmState(Connect conn, final String vmName) { int retry = 3; Domain vms = null; while (retry-- > 0) { try { vms = conn.domainLookupByUUID(UUID.nameUUIDFromBytes(vmName .getBytes())); State s = convertToState(vms.getInfo().state); return s; } catch (final LibvirtException e) { s_logger.warn("Can't get vm state " + vmName + e.getMessage() + "retry:" + retry); } catch (Exception e) { s_logger.warn("Can't get vm state " + vmName + e.getMessage() + "retry:" + retry); } finally { try { if (vms != null) { vms.free(); } } catch (final LibvirtException e) { } } } return State.Stopped; } private Answer execute(CheckVirtualMachineCommand cmd) { try { Connect conn = LibvirtConnection.getConnection(); final State state = getVmState(conn, cmd.getVmName()); Integer vncPort = null; if (state == State.Running) { vncPort = getVncPort(conn, cmd.getVmName()); synchronized (_vms) { _vms.put(cmd.getVmName(), State.Running); } } return new CheckVirtualMachineAnswer(cmd, state, vncPort); } catch (LibvirtException e) { return new CheckVirtualMachineAnswer(cmd, e.getMessage()); } } private Answer execute(PingTestCommand cmd) { String result = null; final String computingHostIp = cmd.getComputingHostIp(); // TODO, split // the // command // into 2 // types if (computingHostIp != null) { result = doPingTest(computingHostIp); } else if (cmd.getRouterIp() != null && cmd.getPrivateIp() != null) { result = doPingTest(cmd.getRouterIp(), cmd.getPrivateIp()); } else { return new Answer(cmd, false, "routerip and private ip is null"); } if (result != null) { return new Answer(cmd, false, result); } return new Answer(cmd); } private String doPingTest(final String computingHostIp) { final Script command = new Script(_pingTestPath, 10000, s_logger); command.add("-h", computingHostIp); return command.execute(); } private String doPingTest(final String domRIp, final String vmIp) { final Script command = new Script(_pingTestPath, 10000, s_logger); command.add("-i", domRIp); command.add("-p", vmIp); return command.execute(); } private synchronized Answer execute(MigrateCommand cmd) { String vmName = cmd.getVmName(); State state = null; String result = null; synchronized (_vms) { state = _vms.get(vmName); _vms.put(vmName, State.Stopping); } List<InterfaceDef> ifaces = null; Domain dm = null; Connect dconn = null; Domain destDomain = null; Connect conn = null; try { conn = LibvirtConnection.getConnection(); ifaces = getInterfaces(conn, vmName); dm = conn.domainLookupByUUID(UUID.nameUUIDFromBytes(vmName .getBytes())); dconn = new Connect("qemu+tcp://" + cmd.getDestinationIp() + "/system"); /* * Hard 
code lm flags: VIR_MIGRATE_LIVE(1<<0) and * VIR_MIGRATE_PERSIST_DEST(1<<3) */ destDomain = dm.migrate(dconn, (1 << 0) | (1 << 3), vmName, "tcp:" + cmd.getDestinationIp(), _migrateSpeed); } catch (LibvirtException e) { s_logger.debug("Can't migrate domain: " + e.getMessage()); result = e.getMessage(); } catch (Exception e) { s_logger.debug("Can't migrate domain: " + e.getMessage()); result = e.getMessage(); } finally { try { if (dm != null) { dm.free(); } if (dconn != null) { dconn.close(); } if (destDomain != null) { destDomain.free(); } } catch (final LibvirtException e) { } } if (result != null) { synchronized (_vms) { _vms.put(vmName, state); } } else { destroy_network_rules_for_vm(conn, vmName); for (InterfaceDef iface : ifaces) { _vifDriver.unplug(iface); } cleanupVM(conn, vmName, getVnetId(VirtualMachineName.getVnet(vmName))); } return new MigrateAnswer(cmd, result == null, result, null); } private synchronized Answer execute(PrepareForMigrationCommand cmd) { VirtualMachineTO vm = cmd.getVirtualMachine(); if (s_logger.isDebugEnabled()) { s_logger.debug("Preparing host for migrating " + vm); } NicTO[] nics = vm.getNics(); try { Connect conn = LibvirtConnection.getConnection(); for (NicTO nic : nics) { _vifDriver.plug(nic, null); } /* setup disks, e.g for iso */ VolumeTO[] volumes = vm.getDisks(); for (VolumeTO volume : volumes) { if (volume.getType() == Volume.Type.ISO) { getVolumePath(conn, volume); } } synchronized (_vms) { _vms.put(vm.getName(), State.Migrating); } return new PrepareForMigrationAnswer(cmd); } catch (LibvirtException e) { return new PrepareForMigrationAnswer(cmd, e.toString()); } catch (InternalErrorException e) { return new PrepareForMigrationAnswer(cmd, e.toString()); } catch (URISyntaxException e) { return new PrepareForMigrationAnswer(cmd, e.toString()); } } private Answer execute(CheckHealthCommand cmd) { return new CheckHealthAnswer(cmd, true); } private Answer execute(GetHostStatsCommand cmd) { final Script cpuScript = new Script("/bin/bash", s_logger); cpuScript.add("-c"); cpuScript .add("idle=$(top -b -n 1|grep Cpu\\(s\\):|cut -d% -f4|cut -d, -f2);echo $idle"); final OutputInterpreter.OneLineParser parser = new OutputInterpreter.OneLineParser(); String result = cpuScript.execute(parser); if (result != null) { s_logger.debug("Unable to get the host CPU state: " + result); return new Answer(cmd, false, result); } double cpuUtil = (100.0D - Double.parseDouble(parser.getLine())); long freeMem = 0; final Script memScript = new Script("/bin/bash", s_logger); memScript.add("-c"); memScript .add("freeMem=$(free|grep cache:|awk '{print $4}');echo $freeMem"); final OutputInterpreter.OneLineParser Memparser = new OutputInterpreter.OneLineParser(); result = memScript.execute(Memparser); if (result != null) { s_logger.debug("Unable to get the host Mem state: " + result); return new Answer(cmd, false, result); } freeMem = Long.parseLong(Memparser.getLine()); Script totalMem = new Script("/bin/bash", s_logger); totalMem.add("-c"); totalMem.add("free|grep Mem:|awk '{print $2}'"); final OutputInterpreter.OneLineParser totMemparser = new OutputInterpreter.OneLineParser(); result = totalMem.execute(totMemparser); if (result != null) { s_logger.debug("Unable to get the host Mem state: " + result); return new Answer(cmd, false, result); } long totMem = Long.parseLong(totMemparser.getLine()); Pair<Double, Double> nicStats = getNicStats(_publicBridgeName); HostStatsEntry hostStats = new HostStatsEntry(cmd.getHostId(), cpuUtil, nicStats.first() / 1000, nicStats.second() / 1000, 
"host", totMem, freeMem, 0, 0); return new GetHostStatsAnswer(cmd, hostStats); } protected String networkUsage(final String privateIpAddress, final String option, final String vif) { Script getUsage = new Script(_routerProxyPath, s_logger); getUsage.add("netusage.sh"); getUsage.add(privateIpAddress); if (option.equals("get")) { getUsage.add("-g"); } else if (option.equals("create")) { getUsage.add("-c"); } else if (option.equals("reset")) { getUsage.add("-r"); } else if (option.equals("addVif")) { getUsage.add("-a", vif); } else if (option.equals("deleteVif")) { getUsage.add("-d", vif); } final OutputInterpreter.OneLineParser usageParser = new OutputInterpreter.OneLineParser(); String result = getUsage.execute(usageParser); if (result != null) { s_logger.debug("Failed to execute networkUsage:" + result); return null; } return usageParser.getLine(); } protected long[] getNetworkStats(String privateIP) { String result = networkUsage(privateIP, "get", null); long[] stats = new long[2]; if (result != null) { String[] splitResult = result.split(":"); int i = 0; while (i < splitResult.length - 1) { stats[0] += (new Long(splitResult[i++])).longValue(); stats[1] += (new Long(splitResult[i++])).longValue(); } } return stats; } private Answer execute(NetworkUsageCommand cmd) { if (cmd.getOption() != null && cmd.getOption().equals("create")) { String result = networkUsage(cmd.getPrivateIP(), "create", null); NetworkUsageAnswer answer = new NetworkUsageAnswer(cmd, result, 0L, 0L); return answer; } long[] stats = getNetworkStats(cmd.getPrivateIP()); NetworkUsageAnswer answer = new NetworkUsageAnswer(cmd, "", stats[0], stats[1]); return answer; } private Answer execute(RebootCommand cmd) { synchronized (_vms) { _vms.put(cmd.getVmName(), State.Starting); } try { Connect conn = LibvirtConnection.getConnection(); final String result = rebootVM(conn, cmd.getVmName()); if (result == null) { Integer vncPort = null; try { vncPort = getVncPort(conn, cmd.getVmName()); } catch (Exception e) { } get_rule_logs_for_vms(); return new RebootAnswer(cmd, null, vncPort); } else { return new RebootAnswer(cmd, result, false); } } catch (LibvirtException e) { return new RebootAnswer(cmd, e.getMessage(), false); } finally { synchronized (_vms) { _vms.put(cmd.getVmName(), State.Running); } } } protected Answer execute(RebootRouterCommand cmd) { RebootAnswer answer = (RebootAnswer) execute((RebootCommand) cmd); String result = _virtRouterResource.connect(cmd.getPrivateIpAddress()); if (result == null) { networkUsage(cmd.getPrivateIpAddress(), "create", null); return answer; } else { return new Answer(cmd, false, result); } } protected GetVmStatsAnswer execute(GetVmStatsCommand cmd) { List<String> vmNames = cmd.getVmNames(); try { HashMap<String, VmStatsEntry> vmStatsNameMap = new HashMap<String, VmStatsEntry>(); Connect conn = LibvirtConnection.getConnection(); for (String vmName : vmNames) { VmStatsEntry statEntry = getVmStat(conn, vmName); if (statEntry == null) { continue; } vmStatsNameMap.put(vmName, statEntry); } return new GetVmStatsAnswer(cmd, vmStatsNameMap); } catch (LibvirtException e) { s_logger.debug("Can't get vm stats: " + e.toString()); return new GetVmStatsAnswer(cmd, null); } } protected Answer execute(StopCommand cmd) { final String vmName = cmd.getVmName(); State state = null; synchronized (_vms) { state = _vms.get(vmName); _vms.put(vmName, State.Stopping); } try { Connect conn = LibvirtConnection.getConnection(); List<DiskDef> disks = getDisks(conn, vmName); List<InterfaceDef> ifaces = getInterfaces(conn, 
vmName); destroy_network_rules_for_vm(conn, vmName); String result = stopVM(conn, vmName); if (result == null) { for (DiskDef disk : disks) { if (disk.getDeviceType() == DiskDef.deviceType.CDROM && disk.getDiskPath() != null) { cleanupDisk(conn, disk); } } for (InterfaceDef iface: ifaces) { _vifDriver.unplug(iface); } } final String result2 = cleanupVnet(conn, cmd.getVnet()); if (result != null && result2 != null) { result = result2 + result; } state = State.Stopped; return new StopAnswer(cmd, result, 0, true); } catch (LibvirtException e) { return new StopAnswer(cmd, e.getMessage(), false); } finally { synchronized (_vms) { if (state != null) { _vms.put(vmName, state); } else { _vms.remove(vmName); } } } } protected Answer execute(ModifySshKeysCommand cmd) { File sshKeysDir = new File(_SSHKEYSPATH); String result = null; if (!sshKeysDir.exists()) { // Change permissions for the 700 Script script = new Script("mkdir", _timeout, s_logger); script.add("-m","700"); script.add(_SSHKEYSPATH); script.execute(); if(!sshKeysDir.exists()) { s_logger.debug("failed to create directory " + _SSHKEYSPATH); } } File pubKeyFile = new File(_SSHPUBKEYPATH); if (!pubKeyFile.exists()) { try { pubKeyFile.createNewFile(); } catch (IOException e) { result = "Failed to create file: " + e.toString(); s_logger.debug(result); } } if (pubKeyFile.exists()) { String pubKey = cmd.getPubKey(); try { FileOutputStream pubkStream = new FileOutputStream(pubKeyFile); pubkStream.write(pubKey.getBytes()); pubkStream.close(); } catch (FileNotFoundException e) { result = "File" + _SSHPUBKEYPATH + "is not found:" + e.toString(); s_logger.debug(result); } catch (IOException e) { result = "Write file " + _SSHPUBKEYPATH + ":" + e.toString(); s_logger.debug(result); } } File prvKeyFile = new File(_SSHPRVKEYPATH); if (!prvKeyFile.exists()) { try { prvKeyFile.createNewFile(); } catch (IOException e) { result = "Failed to create file: " + e.toString(); s_logger.debug(result); } } if (prvKeyFile.exists()) { String prvKey = cmd.getPrvKey(); try { FileOutputStream prvKStream = new FileOutputStream(prvKeyFile); prvKStream.write(prvKey.getBytes()); prvKStream.close(); } catch (FileNotFoundException e) { result = "File" + _SSHPRVKEYPATH + "is not found:" + e.toString(); s_logger.debug(result); } catch (IOException e) { result = "Write file " + _SSHPRVKEYPATH + ":" + e.toString(); s_logger.debug(result); } Script script = new Script("chmod", _timeout, s_logger); script.add("600", _SSHPRVKEYPATH); script.execute(); } if (result != null) { return new Answer(cmd, false, result); } else { return new Answer(cmd, true, null); } } protected void handleVmStartFailure(Connect conn, String vmName, LibvirtVMDef vm) { if (vm != null && vm.getDevices() != null) { cleanupVMNetworks(conn, vm.getDevices().getInterfaces()); } } protected LibvirtVMDef createVMFromSpec(VirtualMachineTO vmTO) { LibvirtVMDef vm = new LibvirtVMDef(); vm.setHvsType(_hypervisorType); vm.setDomainName(vmTO.getName()); vm.setDomUUID(UUID.nameUUIDFromBytes(vmTO.getName().getBytes()) .toString()); vm.setDomDescription(vmTO.getOs()); GuestDef guest = new GuestDef(); guest.setGuestType(GuestDef.guestType.KVM); guest.setGuestArch(vmTO.getArch()); guest.setMachineType("pc"); guest.setBootOrder(GuestDef.bootOrder.CDROM); guest.setBootOrder(GuestDef.bootOrder.HARDISK); vm.addComp(guest); GuestResourceDef grd = new GuestResourceDef(); grd.setMemorySize(vmTO.getMinRam() / 1024); grd.setVcpuNum(vmTO.getCpus()); vm.addComp(grd); CpuTuneDef ctd = new CpuTuneDef(); ctd.setShares(vmTO.getCpus() * 
vmTO.getSpeed()); vm.addComp(ctd); FeaturesDef features = new FeaturesDef(); features.addFeatures("pae"); features.addFeatures("apic"); features.addFeatures("acpi"); vm.addComp(features); TermPolicy term = new TermPolicy(); term.setCrashPolicy("destroy"); term.setPowerOffPolicy("destroy"); term.setRebootPolicy("restart"); vm.addComp(term); ClockDef clock = new ClockDef(); if (vmTO.getOs().startsWith("Windows")) { clock.setClockOffset(ClockDef.ClockOffset.LOCALTIME); clock.setTimer("rtc", "catchup", null); } vm.addComp(clock); DevicesDef devices = new DevicesDef(); devices.setEmulatorPath(_hypervisorPath); SerialDef serial = new SerialDef("pty", null, (short) 0); devices.addDevice(serial); ConsoleDef console = new ConsoleDef("pty", null, null, (short) 0); devices.addDevice(console); GraphicDef grap = new GraphicDef("vnc", (short) 0, true, vmTO.getVncAddr(), null, null); devices.addDevice(grap); InputDef input = new InputDef("tablet", "usb"); devices.addDevice(input); vm.addComp(devices); return vm; } protected void createVifs(VirtualMachineTO vmSpec, LibvirtVMDef vm) throws InternalErrorException, LibvirtException { NicTO[] nics = vmSpec.getNics(); for (int i = 0; i < nics.length; i++) { for (NicTO nic : vmSpec.getNics()) { if (nic.getDeviceId() == i) { createVif(vm, nic); } } } } protected synchronized StartAnswer execute(StartCommand cmd) { VirtualMachineTO vmSpec = cmd.getVirtualMachine(); vmSpec.setVncAddr(cmd.getHostIp()); String vmName = vmSpec.getName(); LibvirtVMDef vm = null; State state = State.Stopped; Connect conn = null; try { conn = LibvirtConnection.getConnection(); synchronized (_vms) { _vms.put(vmName, State.Starting); } vm = createVMFromSpec(vmSpec); createVbd(conn, vmSpec, vmName, vm); createVifs(vmSpec, vm); s_logger.debug("starting " + vmName + ": " + vm.toString()); startDomain(conn, vmName, vm.toString()); NicTO[] nics = vmSpec.getNics(); for (NicTO nic : nics) { if (nic.isSecurityGroupEnabled() || ( nic.getIsolationUri() != null && nic.getIsolationUri().getScheme().equalsIgnoreCase(IsolationType.Ec2.toString()))) { if (vmSpec.getType() != VirtualMachine.Type.User) { default_network_rules_for_systemvm(conn, vmName); break; } else { default_network_rules(conn, vmName, nic, vmSpec.getId()); } } } state = State.Running; return new StartAnswer(cmd); } catch (Exception e) { s_logger.warn("Exception ", e); if (conn != null) { handleVmStartFailure(conn, vmName, vm); } return new StartAnswer(cmd, e.getMessage()); } finally { synchronized (_vms) { if (state != State.Stopped) { _vms.put(vmName, state); } else { _vms.remove(vmName); } } } } private String getVolumePath(Connect conn, VolumeTO volume) throws LibvirtException, URISyntaxException { if (volume.getType() == Volume.Type.ISO && volume.getPath() != null) { String isoPath = volume.getPath(); int index = isoPath.lastIndexOf("/"); String path = isoPath.substring(0, index); String name = isoPath.substring(index + 1); KVMStoragePool secondaryPool = _storagePoolMgr.getStoragePoolByURI( path); KVMPhysicalDisk isoVol = secondaryPool.getPhysicalDisk(name); return isoVol.getPath(); } else { return volume.getPath(); } } protected void createVbd(Connect conn, VirtualMachineTO vmSpec, String vmName, LibvirtVMDef vm) throws InternalErrorException, LibvirtException, URISyntaxException { List<VolumeTO> disks = Arrays.asList(vmSpec.getDisks()); Collections.sort(disks, new Comparator<VolumeTO>() { @Override public int compare(VolumeTO arg0, VolumeTO arg1) { return arg0.getDeviceId() > arg1.getDeviceId() ? 
1 : -1; } }); for (VolumeTO volume : disks) { KVMPhysicalDisk physicalDisk = null; KVMStoragePool pool = null; if (volume.getType() == Volume.Type.ISO && volume.getPath() != null) { String volPath = volume.getPath(); int index = volPath.lastIndexOf("/"); String volDir = volPath.substring(0, index); String volName = volPath.substring(index + 1); KVMStoragePool secondaryStorage = _storagePoolMgr. getStoragePoolByURI(volDir); physicalDisk = secondaryStorage.getPhysicalDisk(volName); } else if (volume.getType() != Volume.Type.ISO) { pool = _storagePoolMgr.getStoragePool( volume.getPoolType(), volume.getPoolUuid()); physicalDisk = pool.getPhysicalDisk(volume.getPath()); } String volPath = null; if (physicalDisk != null) { volPath = physicalDisk.getPath(); } DiskDef.diskBus diskBusType = getGuestDiskModel(vmSpec.getOs()); DiskDef disk = new DiskDef(); if (volume.getType() == Volume.Type.ISO) { if (volPath == null) { /* Add iso as placeholder */ disk.defISODisk(null); } else { disk.defISODisk(volPath); } } else { int devId = (int) volume.getDeviceId(); if (pool.getType() == StoragePoolType.RBD) { /* For RBD pools we use the secret mechanism in libvirt. We store the secret under the UUID of the pool, that's why we pass the pool's UUID as the authSecret */ disk.defNetworkBasedDisk(physicalDisk.getPath().replace("rbd:", ""), pool.getSourceHost(), pool.getSourcePort(), pool.getAuthUserName(), pool.getUuid(), devId, diskBusType, diskProtocol.RBD); } else if (pool.getType() == StoragePoolType.CLVM) { disk.defBlockBasedDisk(physicalDisk.getPath(), devId, diskBusType); } else { if (volume.getType() == Volume.Type.DATADISK) { disk.defFileBasedDisk(physicalDisk.getPath(), devId, DiskDef.diskBus.VIRTIO, DiskDef.diskFmtType.QCOW2); } else { disk.defFileBasedDisk(physicalDisk.getPath(), devId, diskBusType, DiskDef.diskFmtType.QCOW2); } } } vm.getDevices().addDevice(disk); } if (vmSpec.getType() != VirtualMachine.Type.User) { if (_sysvmISOPath != null) { DiskDef iso = new DiskDef(); iso.defISODisk(_sysvmISOPath); vm.getDevices().addDevice(iso); } createPatchVbd(conn, vmName, vm, vmSpec); } } private VolumeTO getVolume(VirtualMachineTO vmSpec, Volume.Type type) { VolumeTO volumes[] = vmSpec.getDisks(); for (VolumeTO volume : volumes) { if (volume.getType() == type) { return volume; } } return null; } private void createPatchVbd(Connect conn, String vmName, LibvirtVMDef vm, VirtualMachineTO vmSpec) throws LibvirtException, InternalErrorException { List<DiskDef> disks = vm.getDevices().getDisks(); DiskDef rootDisk = disks.get(0); VolumeTO rootVol = getVolume(vmSpec, Volume.Type.ROOT); String patchName = vmName + "-patchdisk"; KVMStoragePool pool = _storagePoolMgr.getStoragePool( rootVol.getPoolType(), rootVol.getPoolUuid()); String patchDiskPath = pool.getLocalPath() + "/" + patchName; List<KVMPhysicalDisk> phyDisks = pool.listPhysicalDisks(); boolean foundDisk = false; for (KVMPhysicalDisk phyDisk : phyDisks) { if (phyDisk.getPath().equals(patchDiskPath)) { foundDisk = true; break; } } if (!foundDisk) { s_logger.debug("generating new patch disk for " + vmName + " since none was found"); KVMPhysicalDisk disk = pool.createPhysicalDisk(patchName, KVMPhysicalDisk.PhysicalDiskFormat.RAW, 10L * 1024 * 1024); } else { s_logger.debug("found existing patch disk at " + patchDiskPath + " using it for " + vmName); } /* Format/create fs on this disk */ final Script command = new Script(_createvmPath, _timeout, s_logger); command.add("-f", patchDiskPath); String result = command.execute(); if (result != null) { 
s_logger.debug("Failed to create data disk: " + result); throw new InternalErrorException("Failed to create data disk: " + result); } /* add patch disk */ DiskDef patchDisk = new DiskDef(); if (pool.getType() == StoragePoolType.CLVM) { patchDisk.defBlockBasedDisk(patchDiskPath, 1, rootDisk.getBusType()); } else { patchDisk.defFileBasedDisk(patchDiskPath, 1, rootDisk.getBusType(), DiskDef.diskFmtType.RAW); } disks.add(patchDisk); String bootArgs = vmSpec.getBootArgs(); patchSystemVm(bootArgs, patchDiskPath, vmName); } private void createVif(LibvirtVMDef vm, NicTO nic) throws InternalErrorException, LibvirtException { vm.getDevices().addDevice( _vifDriver.plug(nic, vm.getGuestOSType()).toString()); } protected CheckSshAnswer execute(CheckSshCommand cmd) { String vmName = cmd.getName(); String privateIp = cmd.getIp(); int cmdPort = cmd.getPort(); if (s_logger.isDebugEnabled()) { s_logger.debug("Ping command port, " + privateIp + ":" + cmdPort); } try { String result = _virtRouterResource.connect(privateIp, cmdPort); if (result != null) { return new CheckSshAnswer(cmd, "Can not ping System vm " + vmName + "due to:" + result); } } catch (Exception e) { return new CheckSshAnswer(cmd, e); } if (s_logger.isDebugEnabled()) { s_logger.debug("Ping command port succeeded for vm " + vmName); } return new CheckSshAnswer(cmd); } private boolean cleanupDisk(Connect conn, DiskDef disk) { // need to umount secondary storage String path = disk.getDiskPath(); String poolUuid = null; if (path != null) { String[] token = path.split("/"); if (token.length > 3) { poolUuid = token[2]; } } if (poolUuid == null) { return true; } try { // we use libvirt as storage adaptor since we passed a libvirt // connection to cleanupDisk. We pass a storage type that maps // to libvirt adaptor. 
KVMStoragePool pool = _storagePoolMgr.getStoragePool( StoragePoolType.Filesystem, poolUuid); if (pool != null) { pool.delete(); } return true; } catch (CloudRuntimeException e) { return false; } } protected synchronized String attachOrDetachISO(Connect conn, String vmName, String isoPath, boolean isAttach) throws LibvirtException, URISyntaxException, InternalErrorException { String isoXml = null; if (isoPath != null && isAttach) { int index = isoPath.lastIndexOf("/"); String path = isoPath.substring(0, index); String name = isoPath.substring(index + 1); KVMStoragePool secondaryPool = _storagePoolMgr.getStoragePoolByURI( path); KVMPhysicalDisk isoVol = secondaryPool.getPhysicalDisk(name); isoPath = isoVol.getPath(); DiskDef iso = new DiskDef(); iso.defISODisk(isoPath); isoXml = iso.toString(); } else { DiskDef iso = new DiskDef(); iso.defISODisk(null); isoXml = iso.toString(); } List<DiskDef> disks = getDisks(conn, vmName); String result = attachOrDetachDevice(conn, true, vmName, isoXml); if (result == null && !isAttach) { for (DiskDef disk : disks) { if (disk.getDeviceType() == DiskDef.deviceType.CDROM) { cleanupDisk(conn, disk); } } } return result; } protected synchronized String attachOrDetachDisk(Connect conn, boolean attach, String vmName, KVMPhysicalDisk attachingDisk, int devId) throws LibvirtException, InternalErrorException { List<DiskDef> disks = null; Domain dm = null; DiskDef diskdef = null; try { if (!attach) { dm = conn.domainLookupByUUID(UUID.nameUUIDFromBytes(vmName .getBytes())); LibvirtDomainXMLParser parser = new LibvirtDomainXMLParser(); String xml = dm.getXMLDesc(0); parser.parseDomainXML(xml); disks = parser.getDisks(); for (DiskDef disk : disks) { String file = disk.getDiskPath(); if (file != null && file.equalsIgnoreCase(attachingDisk.getPath())) { diskdef = disk; break; } } if (diskdef == null) { throw new InternalErrorException("disk: " + attachingDisk.getPath() + " is not attached before"); } } else { diskdef = new DiskDef(); if (attachingDisk.getFormat() == PhysicalDiskFormat.QCOW2) { diskdef.defFileBasedDisk(attachingDisk.getPath(), devId, DiskDef.diskBus.VIRTIO, DiskDef.diskFmtType.QCOW2); } else if (attachingDisk.getFormat() == PhysicalDiskFormat.RAW) { diskdef.defBlockBasedDisk(attachingDisk.getPath(), devId, DiskDef.diskBus.VIRTIO); } } String xml = diskdef.toString(); return attachOrDetachDevice(conn, attach, vmName, xml); } finally { if (dm != null) { dm.free(); } } } protected synchronized String attachOrDetachDevice(Connect conn, boolean attach, String vmName, String xml) throws LibvirtException, InternalErrorException { Domain dm = null; try { dm = conn.domainLookupByUUID(UUID.nameUUIDFromBytes((vmName .getBytes()))); if (attach) { s_logger.debug("Attaching device: " + xml); dm.attachDevice(xml); } else { s_logger.debug("Detaching device: " + xml); dm.detachDevice(xml); } } catch (LibvirtException e) { if (attach) { s_logger.warn("Failed to attach device to " + vmName + ": " + e.getMessage()); } else { s_logger.warn("Failed to detach device from " + vmName + ": " + e.getMessage()); } throw e; } catch (Exception e) { throw new InternalErrorException(e.toString()); } finally { if (dm != null) { try { dm.free(); } catch (LibvirtException l) { } } } return null; } @Override public PingCommand getCurrentStatus(long id) { final HashMap<String, State> newStates = sync(); if (!_can_bridge_firewall) { return new PingRoutingCommand(com.cloud.host.Host.Type.Routing, id, newStates); } else { HashMap<String, Pair<Long, Long>> nwGrpStates = syncNetworkGroups(id); 
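// nwGrpStates: vm name -> (field 1, field 5) of each get_rule_logs_for_vms record, typically the vm id and
// the sequence number of the last applied ruleset; records that fail to parse are reported as (-1L, -1L).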
return new PingRoutingWithNwGroupsCommand(getType(), id, newStates, nwGrpStates); } } @Override public Type getType() { return Type.Routing; } private Map<String, String> getVersionStrings() { final Script command = new Script(_versionstringpath, _timeout, s_logger); KeyValueInterpreter kvi = new KeyValueInterpreter(); String result = command.execute(kvi); if (result == null) { return kvi.getKeyValues(); } else { return new HashMap<String, String>(1); } } @Override public StartupCommand[] initialize() { Map<String, State> changes = null; synchronized (_vms) { _vms.clear(); changes = sync(); } final List<Object> info = getHostInfo(); final StartupRoutingCommand cmd = new StartupRoutingCommand( (Integer) info.get(0), (Long) info.get(1), (Long) info.get(2), (Long) info.get(4), (String) info.get(3), HypervisorType.KVM, RouterPrivateIpStrategy.HostLocal); cmd.setStateChanges(changes); fillNetworkInformation(cmd); _privateIp = cmd.getPrivateIpAddress(); cmd.getHostDetails().putAll(getVersionStrings()); cmd.setPool(_pool); cmd.setCluster(_clusterId); cmd.setGatewayIpAddress(_localGateway); StartupStorageCommand sscmd = null; try { KVMStoragePool localStoragePool = _storagePoolMgr .createStoragePool(_localStorageUUID, "localhost", -1, _localStoragePath, "", StoragePoolType.Filesystem); com.cloud.agent.api.StoragePoolInfo pi = new com.cloud.agent.api.StoragePoolInfo( localStoragePool.getUuid(), cmd.getPrivateIpAddress(), _localStoragePath, _localStoragePath, StoragePoolType.Filesystem, localStoragePool.getCapacity(), localStoragePool.getUsed()); sscmd = new StartupStorageCommand(); sscmd.setPoolInfo(pi); sscmd.setGuid(pi.getUuid()); sscmd.setDataCenter(_dcId); sscmd.setResourceType(Storage.StorageResourceType.STORAGE_POOL); } catch (CloudRuntimeException e) { } if (sscmd != null) { return new StartupCommand[] { cmd, sscmd }; } else { return new StartupCommand[] { cmd }; } } protected HashMap<String, State> sync() { HashMap<String, State> newStates; HashMap<String, State> oldStates = null; final HashMap<String, State> changes = new HashMap<String, State>(); synchronized (_vms) { newStates = getAllVms(); if (newStates == null) { s_logger.debug("Unable to get the vm states so no state sync at this point."); return changes; } oldStates = new HashMap<String, State>(_vms.size()); oldStates.putAll(_vms); for (final Map.Entry<String, State> entry : newStates.entrySet()) { final String vm = entry.getKey(); State newState = entry.getValue(); final State oldState = oldStates.remove(vm); if (newState == State.Stopped && oldState != State.Stopping && oldState != null && oldState != State.Stopped) { newState = getRealPowerState(vm); } if (s_logger.isTraceEnabled()) { s_logger.trace("VM " + vm + ": libvirt has state " + newState + " and we have state " + (oldState != null ? oldState.toString() : "null")); } if (vm.startsWith("migrating")) { s_logger.debug("Migration detected. 
Skipping"); continue; } if (oldState == null) { _vms.put(vm, newState); s_logger.debug("Detecting a new state but couldn't find a old state so adding it to the changes: " + vm); changes.put(vm, newState); } else if (oldState == State.Starting) { if (newState == State.Running) { _vms.put(vm, newState); } else if (newState == State.Stopped) { s_logger.debug("Ignoring vm " + vm + " because of a lag in starting the vm."); } } else if (oldState == State.Migrating) { if (newState == State.Running) { s_logger.debug("Detected that an migrating VM is now running: " + vm); _vms.put(vm, newState); } } else if (oldState == State.Stopping) { if (newState == State.Stopped) { _vms.put(vm, newState); } else if (newState == State.Running) { s_logger.debug("Ignoring vm " + vm + " because of a lag in stopping the vm. "); } } else if (oldState != newState) { _vms.put(vm, newState); if (newState == State.Stopped) { if (_vmsKilled.remove(vm)) { s_logger.debug("VM " + vm + " has been killed for storage. "); newState = State.Error; } } changes.put(vm, newState); } } for (final Map.Entry<String, State> entry : oldStates.entrySet()) { final String vm = entry.getKey(); final State oldState = entry.getValue(); if (s_logger.isTraceEnabled()) { s_logger.trace("VM " + vm + " is now missing from libvirt so reporting stopped"); } if (oldState == State.Stopping) { s_logger.debug("Ignoring VM " + vm + " in transition state stopping."); _vms.remove(vm); } else if (oldState == State.Starting) { s_logger.debug("Ignoring VM " + vm + " in transition state starting."); } else if (oldState == State.Stopped) { _vms.remove(vm); } else if (oldState == State.Migrating) { s_logger.debug("Ignoring VM " + vm + " in migrating state."); } else { _vms.remove(vm); State state = State.Stopped; if (_vmsKilled.remove(entry.getKey())) { s_logger.debug("VM " + vm + " has been killed by storage monitor"); state = State.Error; } changes.put(entry.getKey(), state); } } } return changes; } protected State getRealPowerState(String vm) { int i = 0; s_logger.trace("Checking on the HALTED State"); Domain dm = null; for (; i < 5; i++) { try { Connect conn = LibvirtConnection.getConnection(); dm = conn.domainLookupByUUID(UUID.nameUUIDFromBytes(vm .getBytes())); DomainInfo.DomainState vps = dm.getInfo().state; if (vps != null && vps != DomainInfo.DomainState.VIR_DOMAIN_SHUTOFF && vps != DomainInfo.DomainState.VIR_DOMAIN_NOSTATE) { return convertToState(vps); } } catch (final LibvirtException e) { s_logger.trace(e.getMessage()); } catch (Exception e) { s_logger.trace(e.getMessage()); } finally { try { if (dm != null) { dm.free(); } } catch (final LibvirtException e) { } } try { Thread.sleep(1000); } catch (InterruptedException e) { } } return State.Stopped; } protected List<String> getAllVmNames(Connect conn) { ArrayList<String> la = new ArrayList<String>(); try { final String names[] = conn.listDefinedDomains(); for (int i = 0; i < names.length; i++) { la.add(names[i]); } } catch (final LibvirtException e) { s_logger.warn("Failed to list Defined domains", e); } int[] ids = null; try { ids = conn.listDomains(); } catch (final LibvirtException e) { s_logger.warn("Failed to list domains", e); return la; } Domain dm = null; for (int i = 0; i < ids.length; i++) { try { dm = conn.domainLookupByID(ids[i]); la.add(dm.getName()); } catch (final LibvirtException e) { s_logger.warn("Unable to get vms", e); } finally { try { if (dm != null) { dm.free(); } } catch (final LibvirtException e) { } } } return la; } private HashMap<String, State> getAllVms() { final 
HashMap<String, State> vmStates = new HashMap<String, State>(); String[] vms = null; int[] ids = null; Connect conn = null; try { conn = LibvirtConnection.getConnection(); } catch (LibvirtException e) { s_logger.debug("Failed to get connection: " + e.getMessage()); return vmStates; } try { ids = conn.listDomains(); } catch (final LibvirtException e) { s_logger.warn("Unable to listDomains", e); return null; } try { vms = conn.listDefinedDomains(); } catch (final LibvirtException e) { s_logger.warn("Unable to listDomains", e); return null; } Domain dm = null; for (int i = 0; i < ids.length; i++) { try { dm = conn.domainLookupByID(ids[i]); DomainInfo.DomainState ps = dm.getInfo().state; final State state = convertToState(ps); s_logger.trace("VM " + dm.getName() + ": powerstate = " + ps + "; vm state=" + state.toString()); String vmName = dm.getName(); vmStates.put(vmName, state); } catch (final LibvirtException e) { s_logger.warn("Unable to get vms", e); } finally { try { if (dm != null) { dm.free(); } } catch (LibvirtException e) { } } } for (int i = 0; i < vms.length; i++) { try { dm = conn.domainLookupByUUID(UUID.nameUUIDFromBytes(vms[i] .getBytes())); DomainInfo.DomainState ps = dm.getInfo().state; final State state = convertToState(ps); String vmName = dm.getName(); s_logger.trace("VM " + vmName + ": powerstate = " + ps + "; vm state=" + state.toString()); vmStates.put(vmName, state); } catch (final LibvirtException e) { s_logger.warn("Unable to get vms", e); } catch (Exception e) { s_logger.warn("Unable to get vms", e); } finally { try { if (dm != null) { dm.free(); } } catch (LibvirtException e) { } } } return vmStates; } protected List<Object> getHostInfo() { final ArrayList<Object> info = new ArrayList<Object>(); long speed = 0; long cpus = 0; long ram = 0; String cap = null; try { Connect conn = LibvirtConnection.getConnection(); final NodeInfo hosts = conn.nodeInfo(); boolean result = false; try { BufferedReader in = new BufferedReader( new FileReader( "/sys/devices/system/cpu/cpu0/cpufreq/cpuinfo_max_freq")); speed = Long.parseLong(in.readLine()) / 1000; result = true; } catch (FileNotFoundException e) { } catch (IOException e) { } catch (NumberFormatException e) { } if (!result) { speed = hosts.mhz; } cpus = hosts.cpus; ram = hosts.memory * 1024L; LibvirtCapXMLParser parser = new LibvirtCapXMLParser(); parser.parseCapabilitiesXML(conn.getCapabilities()); ArrayList<String> oss = parser.getGuestOsType(); for (String s : oss) { /* * Even host supports guest os type more than hvm, we only * report hvm to management server */ if (s.equalsIgnoreCase("hvm")) { cap = "hvm"; } } } catch (LibvirtException e) { } if (isSnapshotSupported()) { cap = cap + ",snapshot"; } info.add((int) cpus); info.add(speed); info.add(ram); info.add(cap); long dom0ram = Math.min(ram / 10, 768 * 1024 * 1024L);// save a maximum // of 10% of // system ram or // 768M dom0ram = Math.max(dom0ram, _dom0MinMem); info.add(dom0ram); s_logger.debug("cpus=" + cpus + ", speed=" + speed + ", ram=" + ram + ", dom0ram=" + dom0ram); return info; } protected void cleanupVM(Connect conn, final String vmName, final String vnet) { s_logger.debug("Trying to cleanup the vnet: " + vnet); if (vnet != null) { cleanupVnet(conn, vnet); } _vmStats.remove(vmName); } protected String rebootVM(Connect conn, String vmName) { Domain dm = null; String msg = null; try { dm = conn.domainLookupByUUID(UUID.nameUUIDFromBytes(vmName .getBytes())); String vmDef = dm.getXMLDesc(0); s_logger.debug(vmDef); msg = stopVM(conn, vmName); msg = 
startDomain(conn, vmName, vmDef); return null; } catch (LibvirtException e) { s_logger.warn("Failed to create vm", e); msg = e.getMessage(); } catch (Exception e) { s_logger.warn("Failed to create vm", e); msg = e.getMessage(); } finally { try { if (dm != null) { dm.free(); } } catch (LibvirtException e) { } } return msg; } protected String stopVM(Connect conn, String vmName) { DomainInfo.DomainState state = null; Domain dm = null; s_logger.debug("Try to stop the vm at first"); String ret = stopVM(conn, vmName, false); if (ret == Script.ERR_TIMEOUT) { ret = stopVM(conn, vmName, true); } else if (ret != null) { /* * There is a race condition between libvirt and qemu: libvirt * listens on qemu's monitor fd. If qemu is shutdown, while libvirt * is reading on the fd, then libvirt will report an error. */ /* Retry 3 times, to make sure we can get the vm's status */ for (int i = 0; i < 3; i++) { try { dm = conn.domainLookupByUUID(UUID.nameUUIDFromBytes(vmName .getBytes())); state = dm.getInfo().state; break; } catch (LibvirtException e) { s_logger.debug("Failed to get vm status:" + e.getMessage()); } catch (Exception e) { s_logger.debug("Failed to get vm status:" + e.getMessage()); } finally { try { if (dm != null) { dm.free(); } } catch (LibvirtException l) { } } } if (state == null) { s_logger.debug("Can't get vm's status, assume it's dead already"); return null; } if (state != DomainInfo.DomainState.VIR_DOMAIN_SHUTOFF) { s_logger.debug("Try to destroy the vm"); ret = stopVM(conn, vmName, true); if (ret != null) { return ret; } } } return null; } protected String stopVM(Connect conn, String vmName, boolean force) { Domain dm = null; try { dm = conn.domainLookupByUUID(UUID.nameUUIDFromBytes(vmName .getBytes())); if (force) { if (dm.getInfo().state != DomainInfo.DomainState.VIR_DOMAIN_SHUTOFF) { dm.destroy(); } } else { if (dm.getInfo().state == DomainInfo.DomainState.VIR_DOMAIN_SHUTOFF) { return null; } dm.shutdown(); int retry = _stopTimeout / 2000; /* Wait for the domain gets into shutoff state */ while ((dm.getInfo().state != DomainInfo.DomainState.VIR_DOMAIN_SHUTOFF) && (retry >= 0)) { Thread.sleep(2000); retry--; } if (retry < 0) { s_logger.warn("Timed out waiting for domain " + vmName + " to shutdown gracefully"); return Script.ERR_TIMEOUT; } } } catch (LibvirtException e) { s_logger.debug("Failed to stop VM :" + vmName + " :", e); return e.getMessage(); } catch (InterruptedException ie) { s_logger.debug("Interrupted sleep"); return ie.getMessage(); } catch (Exception e) { s_logger.debug("Failed to stop VM :" + vmName + " :", e); return e.getMessage(); } finally { try { if (dm != null) { dm.free(); } } catch (LibvirtException e) { } } return null; } public synchronized String cleanupVnet(Connect conn, final String vnetId) { // VNC proxy VMs do not have vnet if (vnetId == null || vnetId.isEmpty() || isDirectAttachedNetwork(vnetId)) { return null; } final List<String> names = getAllVmNames(conn); if (!names.isEmpty()) { for (final String name : names) { if (VirtualMachineName.getVnet(name).equals(vnetId)) { return null; // Can't remove the vnet yet. 
} } } final Script command = new Script(_modifyVlanPath, _timeout, s_logger); command.add("-o", "delete"); command.add("-v", vnetId); return command.execute(); } protected Integer getVncPort(Connect conn, String vmName) throws LibvirtException { LibvirtDomainXMLParser parser = new LibvirtDomainXMLParser(); Domain dm = null; try { dm = conn.domainLookupByUUID(UUID.nameUUIDFromBytes(vmName .getBytes())); String xmlDesc = dm.getXMLDesc(0); parser.parseDomainXML(xmlDesc); return parser.getVncPort(); } finally { try { if (dm != null) { dm.free(); } } catch (LibvirtException l) { } } } private boolean IsHVMEnabled(Connect conn) { LibvirtCapXMLParser parser = new LibvirtCapXMLParser(); try { parser.parseCapabilitiesXML(conn.getCapabilities()); ArrayList<String> osTypes = parser.getGuestOsType(); for (String o : osTypes) { if (o.equalsIgnoreCase("hvm")) { return true; } } } catch (LibvirtException e) { } return false; } private String getHypervisorPath(Connect conn) { LibvirtCapXMLParser parser = new LibvirtCapXMLParser(); try { parser.parseCapabilitiesXML(conn.getCapabilities()); } catch (LibvirtException e) { s_logger.debug(e.getMessage()); } return parser.getEmulator(); } private String getGuestType(Connect conn, String vmName) { LibvirtDomainXMLParser parser = new LibvirtDomainXMLParser(); Domain dm = null; try { dm = conn.domainLookupByUUID(UUID.nameUUIDFromBytes(vmName .getBytes())); String xmlDesc = dm.getXMLDesc(0); parser.parseDomainXML(xmlDesc); return parser.getDescription(); } catch (LibvirtException e) { return null; } catch (Exception e) { return null; } finally { try { if (dm != null) { dm.free(); } } catch (LibvirtException l) { } } } boolean isGuestPVEnabled(String guestOS) { if (guestOS == null) { return false; } String guestOSName = KVMGuestOsMapper.getGuestOsName(guestOS); if (guestOS.startsWith("Ubuntu") || guestOSName.startsWith("Fedora 13") || guestOSName.startsWith("Fedora 12") || guestOSName.startsWith("Fedora 11") || guestOSName.startsWith("Fedora 10") || guestOSName.startsWith("Fedora 9") || guestOSName.startsWith("CentOS 5.3") || guestOSName.startsWith("CentOS 5.4") || guestOSName.startsWith("CentOS 5.5") || guestOS.startsWith("CentOS") || guestOS.startsWith("Fedora") || guestOSName.startsWith("Red Hat Enterprise Linux 5.3") || guestOSName.startsWith("Red Hat Enterprise Linux 5.4") || guestOSName.startsWith("Red Hat Enterprise Linux 5.5") || guestOSName.startsWith("Red Hat Enterprise Linux 6") || guestOS.startsWith("Debian GNU/Linux") || guestOSName.startsWith("Other PV")) { return true; } else { return false; } } public boolean isCentosHost() { if (_hvVersion <= 9) { return true; } else { return false; } } private InterfaceDef.nicModel getGuestNicModel(String guestOSType) { if (isGuestPVEnabled(guestOSType)) { return InterfaceDef.nicModel.VIRTIO; } else { return InterfaceDef.nicModel.E1000; } } private DiskDef.diskBus getGuestDiskModel(String guestOSType) { if (isGuestPVEnabled(guestOSType)) { return DiskDef.diskBus.VIRTIO; } else { return DiskDef.diskBus.IDE; } } private String getVnetIdFromBrName(String vnetBrName) { if (vnetBrName.contains("cloudVirBr")) { return vnetBrName.replaceAll("cloudVirBr", ""); } else { Pattern r = Pattern.compile("-(\\d+)$"); Matcher m = r.matcher(vnetBrName); if(m.group(1) != null || !m.group(1).isEmpty()) { return m.group(1); } else { s_logger.debug("unable to get a vlan ID from name " + vnetBrName); return ""; } } } private void cleanupVMNetworks(Connect conn, List<InterfaceDef> nics) { for (InterfaceDef nic : nics) { if 
(nic.getHostNetType() == hostNicType.VNET) { cleanupVnet(conn, getVnetIdFromBrName(nic.getBrName())); } } } private Domain getDomain(Connect conn, String vmName) throws LibvirtException { return conn .domainLookupByUUID(UUID.nameUUIDFromBytes(vmName.getBytes())); } protected List<InterfaceDef> getInterfaces(Connect conn, String vmName) { LibvirtDomainXMLParser parser = new LibvirtDomainXMLParser(); Domain dm = null; try { dm = conn.domainLookupByUUID(UUID.nameUUIDFromBytes(vmName .getBytes())); parser.parseDomainXML(dm.getXMLDesc(0)); return parser.getInterfaces(); } catch (LibvirtException e) { s_logger.debug("Failed to get dom xml: " + e.toString()); return new ArrayList<InterfaceDef>(); } catch (Exception e) { s_logger.debug("Failed to get dom xml: " + e.toString()); return new ArrayList<InterfaceDef>(); } finally { try { if (dm != null) { dm.free(); } } catch (LibvirtException e) { } } } protected List<DiskDef> getDisks(Connect conn, String vmName) { LibvirtDomainXMLParser parser = new LibvirtDomainXMLParser(); Domain dm = null; try { dm = conn.domainLookupByUUID(UUID.nameUUIDFromBytes(vmName .getBytes())); parser.parseDomainXML(dm.getXMLDesc(0)); return parser.getDisks(); } catch (LibvirtException e) { s_logger.debug("Failed to get dom xml: " + e.toString()); return new ArrayList<DiskDef>(); } catch (Exception e) { s_logger.debug("Failed to get dom xml: " + e.toString()); return new ArrayList<DiskDef>(); } finally { try { if (dm != null) { dm.free(); } } catch (LibvirtException e) { } } } private String executeBashScript(String script) { Script command = new Script("/bin/bash", _timeout, s_logger); command.add("-c"); command.add(script); return command.execute(); } private String executeBashScript(String script, OutputInterpreter parser) { Script command = new Script("/bin/bash", _timeout, s_logger); command.add("-c"); command.add(script); return command.execute(parser); } private void deletExitingLinkLocalRoutTable(String linkLocalBr) { Script command = new Script("/bin/bash", _timeout); command.add("-c"); command.add("ip route | grep " + NetUtils.getLinkLocalCIDR()); OutputInterpreter.AllLinesParser parser = new OutputInterpreter.AllLinesParser(); String result = command.execute(parser); boolean foundLinkLocalBr = false; if (result == null && parser.getLines() != null) { String[] lines = parser.getLines().split("\\n"); for (String line : lines) { String[] tokens = line.split(" "); if (!tokens[2].equalsIgnoreCase(linkLocalBr)) { Script.runSimpleBashScript("ip route del " + NetUtils.getLinkLocalCIDR()); } else { foundLinkLocalBr = true; } } } if (!foundLinkLocalBr) { Script.runSimpleBashScript("ip route add " + NetUtils.getLinkLocalCIDR() + " dev " + linkLocalBr + " src " + NetUtils.getLinkLocalGateway()); } } private class vmStats { long _usedTime; long _tx; long _rx; Calendar _timestamp; } private VmStatsEntry getVmStat(Connect conn, String vmName) throws LibvirtException { Domain dm = null; try { dm = getDomain(conn, vmName); DomainInfo info = dm.getInfo(); VmStatsEntry stats = new VmStatsEntry(); stats.setNumCPUs(info.nrVirtCpu); stats.setEntityType("vm"); /* get cpu utilization */ vmStats oldStats = null; Calendar now = Calendar.getInstance(); oldStats = _vmStats.get(vmName); long elapsedTime = 0; if (oldStats != null) { elapsedTime = now.getTimeInMillis() - oldStats._timestamp.getTimeInMillis(); double utilization = (info.cpuTime - oldStats._usedTime) / ((double) elapsedTime * 1000000); NodeInfo node = conn.nodeInfo(); utilization = utilization / node.cpus; if(utilization > 0){ 
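// cpuTime deltas from libvirt are in nanoseconds while elapsedTime is in milliseconds (hence the * 1000000);
// dividing by node.cpus averages the load across all host cores, and it is reported below as a percentage.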
stats.setCPUUtilization(utilization * 100); } } /* get network stats */ List<InterfaceDef> vifs = getInterfaces(conn, vmName); long rx = 0; long tx = 0; for (InterfaceDef vif : vifs) { DomainInterfaceStats ifStats = dm.interfaceStats(vif .getDevName()); rx += ifStats.rx_bytes; tx += ifStats.tx_bytes; } if (oldStats != null) { long deltarx = rx - oldStats._rx; if (deltarx > 0) stats.setNetworkReadKBs(deltarx / 1000); long deltatx = tx - oldStats._tx; if (deltatx > 0) stats.setNetworkWriteKBs(deltatx / 1000); } vmStats newStat = new vmStats(); newStat._usedTime = info.cpuTime; newStat._rx = rx; newStat._tx = tx; newStat._timestamp = now; _vmStats.put(vmName, newStat); return stats; } finally { if (dm != null) { dm.free(); } } } private boolean can_bridge_firewall(String prvNic) { Script cmd = new Script(_securityGroupPath, _timeout, s_logger); cmd.add("can_bridge_firewall"); cmd.add(prvNic); String result = cmd.execute(); if (result != null) { return false; } return true; } protected boolean destroy_network_rules_for_vm(Connect conn, String vmName) { if (!_can_bridge_firewall) { return false; } String vif = null; List<InterfaceDef> intfs = getInterfaces(conn, vmName); if (intfs.size() > 0) { InterfaceDef intf = intfs.get(0); vif = intf.getDevName(); } Script cmd = new Script(_securityGroupPath, _timeout, s_logger); cmd.add("destroy_network_rules_for_vm"); cmd.add("--vmname", vmName); if (vif != null) { cmd.add("--vif", vif); } String result = cmd.execute(); if (result != null) { return false; } return true; } protected boolean default_network_rules(Connect conn, String vmName, NicTO nic, Long vmId) { if (!_can_bridge_firewall) { return false; } List<InterfaceDef> intfs = getInterfaces(conn, vmName); if (intfs.size() < nic.getDeviceId()) { return false; } InterfaceDef intf = intfs.get(nic.getDeviceId()); String brname = intf.getBrName(); String vif = intf.getDevName(); Script cmd = new Script(_securityGroupPath, _timeout, s_logger); cmd.add("default_network_rules"); cmd.add("--vmname", vmName); cmd.add("--vmid", vmId.toString()); if (nic.getIp() != null) { cmd.add("--vmip", nic.getIp()); } cmd.add("--vmmac", nic.getMac()); cmd.add("--vif", vif); cmd.add("--brname", brname); String result = cmd.execute(); if (result != null) { return false; } return true; } protected boolean post_default_network_rules(Connect conn, String vmName, NicTO nic, Long vmId, InetAddress dhcpServerIp, String hostIp, String hostMacAddr) { if (!_can_bridge_firewall) { return false; } List<InterfaceDef> intfs = getInterfaces(conn, vmName); if (intfs.size() < nic.getDeviceId()) { return false; } InterfaceDef intf = intfs.get(nic.getDeviceId()); String brname = intf.getBrName(); String vif = intf.getDevName(); Script cmd = new Script(_securityGroupPath, _timeout, s_logger); cmd.add("post_default_network_rules"); cmd.add("--vmname", vmName); cmd.add("--vmid", vmId.toString()); cmd.add("--vmip", nic.getIp()); cmd.add("--vmmac", nic.getMac()); cmd.add("--vif", vif); cmd.add("--brname", brname); if (dhcpServerIp != null) cmd.add("--dhcpSvr", dhcpServerIp.getHostAddress()); cmd.add("--hostIp", hostIp); cmd.add("--hostMacAddr", hostMacAddr); String result = cmd.execute(); if (result != null) { return false; } return true; } protected boolean default_network_rules_for_systemvm(Connect conn, String vmName) { if (!_can_bridge_firewall) { return false; } Script cmd = new Script(_securityGroupPath, _timeout, s_logger); cmd.add("default_network_rules_systemvm"); cmd.add("--vmname", vmName); cmd.add("--localbrname", 
_linkLocalBridgeName); String result = cmd.execute(); if (result != null) { return false; } return true; } private boolean add_network_rules(String vmName, String vmId, String guestIP, String sig, String seq, String mac, String rules, String vif, String brname) { if (!_can_bridge_firewall) { return false; } String newRules = rules.replace(" ", ";"); Script cmd = new Script(_securityGroupPath, _timeout, s_logger); cmd.add("add_network_rules"); cmd.add("--vmname", vmName); cmd.add("--vmid", vmId); cmd.add("--vmip", guestIP); cmd.add("--sig", sig); cmd.add("--seq", seq); cmd.add("--vmmac", mac); cmd.add("--vif", vif); cmd.add("--brname", brname); if (rules != null) { cmd.add("--rules", newRules); } String result = cmd.execute(); if (result != null) { return false; } return true; } private boolean cleanup_rules() { if (!_can_bridge_firewall) { return false; } Script cmd = new Script(_securityGroupPath, _timeout, s_logger); cmd.add("cleanup_rules"); String result = cmd.execute(); if (result != null) { return false; } return true; } private String get_rule_logs_for_vms() { Script cmd = new Script(_securityGroupPath, _timeout, s_logger); cmd.add("get_rule_logs_for_vms"); OutputInterpreter.OneLineParser parser = new OutputInterpreter.OneLineParser(); String result = cmd.execute(parser); if (result == null) { return parser.getLine(); } return null; } private HashMap<String, Pair<Long, Long>> syncNetworkGroups(long id) { HashMap<String, Pair<Long, Long>> states = new HashMap<String, Pair<Long, Long>>(); String result = get_rule_logs_for_vms(); s_logger.trace("syncNetworkGroups: id=" + id + " got: " + result); String[] rulelogs = result != null ? result.split(";") : new String[0]; for (String rulesforvm : rulelogs) { String[] log = rulesforvm.split(","); if (log.length != 6) { continue; } try { states.put(log[0], new Pair<Long, Long>(Long.parseLong(log[1]), Long.parseLong(log[5]))); } catch (NumberFormatException nfe) { states.put(log[0], new Pair<Long, Long>(-1L, -1L)); } } return states; } /* online snapshot supported by enhanced qemu-kvm */ private boolean isSnapshotSupported() { String result = executeBashScript("qemu-img --help|grep convert"); if (result != null) { return false; } else { return true; } } private Pair<Double, Double> getNicStats(String nicName) { double rx = 0.0; String rxFile = "/sys/class/net/" + nicName + "/statistics/rx_bytes"; String rxContent = FileUtil.readFileAsString(rxFile); if (rxContent == null) { s_logger.warn("Failed to read the rx_bytes for " + nicName + " from " + rxFile); } rx = Double.parseDouble(rxContent); double tx = 0.0; String txFile = "/sys/class/net/" + nicName + "/statistics/tx_bytes"; String txContent = FileUtil.readFileAsString(txFile); if (txContent == null) { s_logger.warn("Failed to read the tx_bytes for " + nicName + " from " + txFile); } tx = Double.parseDouble(txContent); return new Pair<Double, Double>(rx, tx); } private Answer execute(NetworkRulesSystemVmCommand cmd) { boolean success = false; Connect conn; try { conn = LibvirtConnection.getConnection(); success = default_network_rules_for_systemvm(conn, cmd.getVmName()); } catch (LibvirtException e) { // TODO Auto-generated catch block e.printStackTrace(); } return new Answer(cmd, success, ""); } @Override public void setName(String name) { // TODO Auto-generated method stub } @Override public void setConfigParams(Map<String, Object> params) { // TODO Auto-generated method stub } @Override public Map<String, Object> getConfigParams() { // TODO Auto-generated method stub return null; } 
    @Override
    public int getRunLevel() {
        // TODO Auto-generated method stub
        return 0;
    }

    @Override
    public void setRunLevel(int level) {
        // TODO Auto-generated method stub
    }
}
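The dm.migrate(...) call earlier in this file passes its flags as the raw bitmask (1 << 0) | (1 << 3), which the in-source comment identifies as VIR_MIGRATE_LIVE and VIR_MIGRATE_PERSIST_DEST. The sketch below only restates that composition with named constants so the intent of the magic number is visible; the class and constant names are illustrative placeholders, not part of LibvirtComputingResource or the libvirt Java bindings.

/* Illustrative sketch only -- flag values taken from the comment above the migrate() call. */
class MigrateFlagsSketch {
    /* bit 0: keep the domain running while it is being migrated (live migration) */
    static final long VIR_MIGRATE_LIVE = 1L << 0;
    /* bit 3: persistently define the domain on the destination host */
    static final long VIR_MIGRATE_PERSIST_DEST = 1L << 3;

    /* Equivalent to the literal (1 << 0) | (1 << 3), i.e. 9, used in the MigrateCommand handling. */
    static long liveAndPersistFlags() {
        return VIR_MIGRATE_LIVE | VIR_MIGRATE_PERSIST_DEST;
    }
}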
plugins/hypervisors/kvm/src/com/cloud/hypervisor/kvm/resource/LibvirtComputingResource.java
// Licensed to the Apache Software Foundation (ASF) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The ASF licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. package com.cloud.hypervisor.kvm.resource; import java.io.BufferedReader; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.FileReader; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.net.InetAddress; import java.net.URISyntaxException; import java.net.URL; import java.net.URLConnection; import java.text.DateFormat; import java.text.MessageFormat; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Arrays; import java.util.Calendar; import java.util.Collections; import java.util.Comparator; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.UUID; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; import java.util.regex.Matcher; import java.util.regex.Pattern; import javax.ejb.Local; import javax.naming.ConfigurationException; import org.apache.log4j.Logger; import org.libvirt.Connect; import org.libvirt.Domain; import org.libvirt.DomainInfo; import org.libvirt.DomainInterfaceStats; import org.libvirt.DomainSnapshot; import org.libvirt.LibvirtException; import org.libvirt.NodeInfo; import com.cloud.agent.api.Answer; import com.cloud.agent.api.AttachIsoCommand; import com.cloud.agent.api.AttachVolumeAnswer; import com.cloud.agent.api.AttachVolumeCommand; import com.cloud.agent.api.BackupSnapshotAnswer; import com.cloud.agent.api.BackupSnapshotCommand; import com.cloud.agent.api.CheckHealthAnswer; import com.cloud.agent.api.CheckHealthCommand; import com.cloud.agent.api.CheckNetworkAnswer; import com.cloud.agent.api.CheckNetworkCommand; import com.cloud.agent.api.CheckStateCommand; import com.cloud.agent.api.CheckVirtualMachineAnswer; import com.cloud.agent.api.CheckVirtualMachineCommand; import com.cloud.agent.api.CleanupNetworkRulesCmd; import com.cloud.agent.api.Command; import com.cloud.agent.api.CreatePrivateTemplateFromSnapshotCommand; import com.cloud.agent.api.CreatePrivateTemplateFromVolumeCommand; import com.cloud.agent.api.CreateStoragePoolCommand; import com.cloud.agent.api.CreateVolumeFromSnapshotAnswer; import com.cloud.agent.api.CreateVolumeFromSnapshotCommand; import com.cloud.agent.api.DeleteSnapshotBackupAnswer; import com.cloud.agent.api.DeleteSnapshotBackupCommand; import com.cloud.agent.api.DeleteSnapshotsDirCommand; import com.cloud.agent.api.DeleteStoragePoolCommand; import com.cloud.agent.api.FenceAnswer; import com.cloud.agent.api.FenceCommand; import 
com.cloud.agent.api.GetHostStatsAnswer; import com.cloud.agent.api.GetHostStatsCommand; import com.cloud.agent.api.GetStorageStatsAnswer; import com.cloud.agent.api.GetStorageStatsCommand; import com.cloud.agent.api.GetVmStatsAnswer; import com.cloud.agent.api.GetVmStatsCommand; import com.cloud.agent.api.GetVncPortAnswer; import com.cloud.agent.api.GetVncPortCommand; import com.cloud.agent.api.HostStatsEntry; import com.cloud.agent.api.MaintainAnswer; import com.cloud.agent.api.MaintainCommand; import com.cloud.agent.api.ManageSnapshotAnswer; import com.cloud.agent.api.ManageSnapshotCommand; import com.cloud.agent.api.MigrateAnswer; import com.cloud.agent.api.MigrateCommand; import com.cloud.agent.api.ModifySshKeysCommand; import com.cloud.agent.api.ModifyStoragePoolAnswer; import com.cloud.agent.api.ModifyStoragePoolCommand; import com.cloud.agent.api.NetworkRulesSystemVmCommand; import com.cloud.agent.api.NetworkUsageAnswer; import com.cloud.agent.api.NetworkUsageCommand; import com.cloud.agent.api.PingCommand; import com.cloud.agent.api.PingRoutingCommand; import com.cloud.agent.api.PingRoutingWithNwGroupsCommand; import com.cloud.agent.api.PingTestCommand; import com.cloud.agent.api.PlugNicAnswer; import com.cloud.agent.api.PlugNicCommand; import com.cloud.agent.api.PrepareForMigrationAnswer; import com.cloud.agent.api.PrepareForMigrationCommand; import com.cloud.agent.api.ReadyAnswer; import com.cloud.agent.api.ReadyCommand; import com.cloud.agent.api.RebootAnswer; import com.cloud.agent.api.RebootCommand; import com.cloud.agent.api.RebootRouterCommand; import com.cloud.agent.api.SecurityGroupRuleAnswer; import com.cloud.agent.api.SecurityGroupRulesCmd; import com.cloud.agent.api.SetupGuestNetworkAnswer; import com.cloud.agent.api.SetupGuestNetworkCommand; import com.cloud.agent.api.StartAnswer; import com.cloud.agent.api.StartCommand; import com.cloud.agent.api.StartupCommand; import com.cloud.agent.api.StartupRoutingCommand; import com.cloud.agent.api.StartupStorageCommand; import com.cloud.agent.api.StopAnswer; import com.cloud.agent.api.StopCommand; import com.cloud.agent.api.UnPlugNicAnswer; import com.cloud.agent.api.UnPlugNicCommand; import com.cloud.agent.api.UpgradeSnapshotCommand; import com.cloud.agent.api.VmStatsEntry; import com.cloud.agent.api.check.CheckSshAnswer; import com.cloud.agent.api.check.CheckSshCommand; import com.cloud.agent.api.proxy.CheckConsoleProxyLoadCommand; import com.cloud.agent.api.proxy.ConsoleProxyLoadAnswer; import com.cloud.agent.api.proxy.WatchConsoleProxyLoadCommand; import com.cloud.agent.api.routing.IpAssocAnswer; import com.cloud.agent.api.routing.IpAssocCommand; import com.cloud.agent.api.routing.IpAssocVpcCommand; import com.cloud.agent.api.routing.NetworkElementCommand; import com.cloud.agent.api.routing.SetNetworkACLAnswer; import com.cloud.agent.api.routing.SetNetworkACLCommand; import com.cloud.agent.api.routing.SetSourceNatAnswer; import com.cloud.agent.api.routing.SetSourceNatCommand; import com.cloud.agent.api.storage.CopyVolumeAnswer; import com.cloud.agent.api.storage.CopyVolumeCommand; import com.cloud.agent.api.storage.CreateAnswer; import com.cloud.agent.api.storage.CreateCommand; import com.cloud.agent.api.storage.CreatePrivateTemplateAnswer; import com.cloud.agent.api.storage.DestroyCommand; import com.cloud.agent.api.storage.PrimaryStorageDownloadAnswer; import com.cloud.agent.api.storage.PrimaryStorageDownloadCommand; import com.cloud.agent.api.storage.ResizeVolumeCommand; import 
com.cloud.agent.api.storage.ResizeVolumeAnswer; import com.cloud.agent.api.to.IpAddressTO; import com.cloud.agent.api.to.NicTO; import com.cloud.agent.api.to.StorageFilerTO; import com.cloud.agent.api.to.VirtualMachineTO; import com.cloud.agent.api.to.VolumeTO; import com.cloud.agent.resource.virtualnetwork.VirtualRoutingResource; import com.cloud.dc.Vlan; import com.cloud.exception.InternalErrorException; import com.cloud.host.Host.Type; import com.cloud.hypervisor.Hypervisor.HypervisorType; import com.cloud.hypervisor.kvm.resource.KVMHABase.NfsStoragePool; import com.cloud.hypervisor.kvm.resource.LibvirtVMDef.ClockDef; import com.cloud.hypervisor.kvm.resource.LibvirtVMDef.ConsoleDef; import com.cloud.hypervisor.kvm.resource.LibvirtVMDef.CpuTuneDef; import com.cloud.hypervisor.kvm.resource.LibvirtVMDef.DevicesDef; import com.cloud.hypervisor.kvm.resource.LibvirtVMDef.DiskDef; import com.cloud.hypervisor.kvm.resource.LibvirtVMDef.DiskDef.diskProtocol; import com.cloud.hypervisor.kvm.resource.LibvirtVMDef.FeaturesDef; import com.cloud.hypervisor.kvm.resource.LibvirtVMDef.GraphicDef; import com.cloud.hypervisor.kvm.resource.LibvirtVMDef.GuestDef; import com.cloud.hypervisor.kvm.resource.LibvirtVMDef.GuestResourceDef; import com.cloud.hypervisor.kvm.resource.LibvirtVMDef.InputDef; import com.cloud.hypervisor.kvm.resource.LibvirtVMDef.InterfaceDef; import com.cloud.hypervisor.kvm.resource.LibvirtVMDef.InterfaceDef.hostNicType; import com.cloud.hypervisor.kvm.resource.LibvirtVMDef.SerialDef; import com.cloud.hypervisor.kvm.resource.LibvirtVMDef.TermPolicy; import com.cloud.hypervisor.kvm.storage.KVMPhysicalDisk; import com.cloud.hypervisor.kvm.storage.KVMPhysicalDisk.PhysicalDiskFormat; import com.cloud.hypervisor.kvm.storage.KVMStoragePool; import com.cloud.hypervisor.kvm.storage.KVMStoragePoolManager; import com.cloud.network.Networks.BroadcastDomainType; import com.cloud.network.Networks.IsolationType; import com.cloud.network.Networks.RouterPrivateIpStrategy; import com.cloud.network.Networks.TrafficType; import com.cloud.network.PhysicalNetworkSetupInfo; import com.cloud.resource.ServerResource; import com.cloud.resource.ServerResourceBase; import com.cloud.storage.JavaStorageLayer; import com.cloud.storage.Storage; import com.cloud.storage.Storage.ImageFormat; import com.cloud.storage.Storage.StoragePoolType; import com.cloud.storage.StorageLayer; import com.cloud.storage.Volume; import com.cloud.storage.template.Processor; import com.cloud.storage.template.Processor.FormatInfo; import com.cloud.storage.template.QCOW2Processor; import com.cloud.storage.template.TemplateInfo; import com.cloud.storage.template.TemplateLocation; import com.cloud.utils.NumbersUtil; import com.cloud.utils.Pair; import com.cloud.utils.FileUtil; import com.cloud.utils.PropertiesUtil; import com.cloud.utils.exception.CloudRuntimeException; import com.cloud.utils.net.NetUtils; import com.cloud.utils.script.OutputInterpreter; import com.cloud.utils.script.Script; import com.cloud.vm.DiskProfile; import com.cloud.vm.VirtualMachine; import com.cloud.vm.VirtualMachine.State; import com.cloud.vm.VirtualMachineName; /** * LibvirtComputingResource execute requests on the computing/routing host using * the libvirt API * * @config {@table || Param Name | Description | Values | Default || || * hypervisor.type | type of local hypervisor | string | kvm || || * hypervisor.uri | local hypervisor to connect to | URI | * qemu:///system || || domr.arch | instruction set for domr template | * string | i686 || || private.bridge.name 
| private bridge where the * domrs have their private interface | string | vmops0 || || * public.bridge.name | public bridge where the domrs have their public * interface | string | br0 || || private.network.name | name of the * network where the domrs have their private interface | string | * vmops-private || || private.ipaddr.start | start of the range of * private ip addresses for domrs | ip address | 192.168.166.128 || || * private.ipaddr.end | end of the range of private ip addresses for * domrs | ip address | start + 126 || || private.macaddr.start | start * of the range of private mac addresses for domrs | mac address | * 00:16:3e:77:e2:a0 || || private.macaddr.end | end of the range of * private mac addresses for domrs | mac address | start + 126 || || * pool | the parent of the storage pool hierarchy * } **/ @Local(value = { ServerResource.class }) public class LibvirtComputingResource extends ServerResourceBase implements ServerResource { private static final Logger s_logger = Logger .getLogger(LibvirtComputingResource.class); private String _modifyVlanPath; private String _versionstringpath; private String _patchdomrPath; private String _createvmPath; private String _manageSnapshotPath; private String _resizeVolumePath; private String _createTmplPath; private String _heartBeatPath; private String _securityGroupPath; private String _routerProxyPath; private String _host; private String _dcId; private String _pod; private String _clusterId; private int _migrateSpeed; private long _hvVersion; private KVMHAMonitor _monitor; private final String _SSHKEYSPATH = "/root/.ssh"; private final String _SSHPRVKEYPATH = _SSHKEYSPATH + File.separator + "id_rsa.cloud"; private final String _SSHPUBKEYPATH = _SSHKEYSPATH + File.separator + "id_rsa.pub.cloud"; private String _mountPoint = "/mnt"; StorageLayer _storage; private KVMStoragePoolManager _storagePoolMgr; private VifDriver _vifDriver; private static final class KeyValueInterpreter extends OutputInterpreter { private final Map<String, String> map = new HashMap<String, String>(); @Override public String interpret(BufferedReader reader) throws IOException { String line = null; int numLines = 0; while ((line = reader.readLine()) != null) { String[] toks = line.trim().split("="); if (toks.length < 2) { s_logger.warn("Failed to parse Script output: " + line); } else { map.put(toks[0].trim(), toks[1].trim()); } numLines++; } if (numLines == 0) { s_logger.warn("KeyValueInterpreter: no output lines?"); } return null; } public Map<String, String> getKeyValues() { return map; } } @Override protected String getDefaultScriptsDir() { return null; } protected static MessageFormat SnapshotXML = new MessageFormat( " <domainsnapshot>" + " <name>{0}</name>" + " <domain>" + " <uuid>{1}</uuid>" + " </domain>" + " </domainsnapshot>"); protected String _hypervisorType; protected String _hypervisorURI; protected String _hypervisorPath; protected String _sysvmISOPath; protected String _privNwName; protected String _privBridgeName; protected String _linkLocalBridgeName; protected String _publicBridgeName; protected String _guestBridgeName; protected String _privateIp; protected String _pool; protected String _localGateway; private boolean _can_bridge_firewall; protected String _localStoragePath; protected String _localStorageUUID; private final Map <String, String> _pifs = new HashMap<String, String>(); private final Map<String, Map<String, String>> hostNetInfo = new HashMap<String, Map<String, String>>(); private final Map<String, vmStats> _vmStats = new 
ConcurrentHashMap<String, vmStats>(); protected boolean _disconnected = true; protected int _timeout; protected int _cmdsTimeout; protected int _stopTimeout; protected static HashMap<DomainInfo.DomainState, State> s_statesTable; static { s_statesTable = new HashMap<DomainInfo.DomainState, State>(); s_statesTable.put(DomainInfo.DomainState.VIR_DOMAIN_SHUTOFF, State.Stopped); s_statesTable.put(DomainInfo.DomainState.VIR_DOMAIN_PAUSED, State.Running); s_statesTable.put(DomainInfo.DomainState.VIR_DOMAIN_RUNNING, State.Running); s_statesTable.put(DomainInfo.DomainState.VIR_DOMAIN_BLOCKED, State.Running); s_statesTable.put(DomainInfo.DomainState.VIR_DOMAIN_NOSTATE, State.Unknown); s_statesTable.put(DomainInfo.DomainState.VIR_DOMAIN_SHUTDOWN, State.Stopping); } protected HashMap<String, State> _vms = new HashMap<String, State>(20); protected List<String> _vmsKilled = new ArrayList<String>(); private VirtualRoutingResource _virtRouterResource; private String _pingTestPath; private int _dom0MinMem; protected enum BridgeType { NATIVE, OPENVSWITCH } protected enum defineOps { UNDEFINE_VM, DEFINE_VM } protected BridgeType _bridgeType; private String getEndIpFromStartIp(String startIp, int numIps) { String[] tokens = startIp.split("[.]"); assert (tokens.length == 4); int lastbyte = Integer.parseInt(tokens[3]); lastbyte = lastbyte + numIps; tokens[3] = Integer.toString(lastbyte); StringBuilder end = new StringBuilder(15); end.append(tokens[0]).append(".").append(tokens[1]).append(".") .append(tokens[2]).append(".").append(tokens[3]); return end.toString(); } private Map<String, Object> getDeveloperProperties() throws ConfigurationException { final File file = PropertiesUtil.findConfigFile("developer.properties"); if (file == null) { throw new ConfigurationException( "Unable to find developer.properties."); } s_logger.info("developer.properties found at " + file.getAbsolutePath()); Properties properties = new Properties(); try { properties.load(new FileInputStream(file)); String startMac = (String) properties.get("private.macaddr.start"); if (startMac == null) { throw new ConfigurationException( "Developers must specify start mac for private ip range"); } String startIp = (String) properties.get("private.ipaddr.start"); if (startIp == null) { throw new ConfigurationException( "Developers must specify start ip for private ip range"); } final Map<String, Object> params = PropertiesUtil.toMap(properties); String endIp = (String) properties.get("private.ipaddr.end"); if (endIp == null) { endIp = getEndIpFromStartIp(startIp, 16); params.put("private.ipaddr.end", endIp); } return params; } catch (final FileNotFoundException ex) { throw new CloudRuntimeException("Cannot find the file: " + file.getAbsolutePath(), ex); } catch (final IOException ex) { throw new CloudRuntimeException("IOException in reading " + file.getAbsolutePath(), ex); } } protected String getDefaultNetworkScriptsDir() { return "scripts/vm/network/vnet"; } protected String getDefaultStorageScriptsDir() { return "scripts/storage/qcow2"; } protected String getDefaultKvmScriptsDir() { return "scripts/vm/hypervisor/kvm"; } protected String getDefaultDomrScriptsDir() { return "scripts/network/domr/kvm"; } @Override public boolean configure(String name, Map<String, Object> params) throws ConfigurationException { boolean success = super.configure(name, params); if (!success) { return false; } _storage = new JavaStorageLayer(); _storage.configure("StorageLayer", params); String domrScriptsDir = (String) params.get("domr.scripts.dir"); if 
(domrScriptsDir == null) { domrScriptsDir = getDefaultDomrScriptsDir(); } String kvmScriptsDir = (String) params.get("kvm.scripts.dir"); if (kvmScriptsDir == null) { kvmScriptsDir = getDefaultKvmScriptsDir(); } String networkScriptsDir = (String) params.get("network.scripts.dir"); if (networkScriptsDir == null) { networkScriptsDir = getDefaultNetworkScriptsDir(); } String storageScriptsDir = (String) params.get("storage.scripts.dir"); if (storageScriptsDir == null) { storageScriptsDir = getDefaultStorageScriptsDir(); } String bridgeType = (String) params.get("network.bridge.type"); if (bridgeType == null) { _bridgeType = BridgeType.NATIVE; } else { _bridgeType = BridgeType.valueOf(bridgeType.toUpperCase()); } params.put("domr.scripts.dir", domrScriptsDir); _virtRouterResource = new VirtualRoutingResource(); success = _virtRouterResource.configure(name, params); if (!success) { return false; } _host = (String) params.get("host"); if (_host == null) { _host = "localhost"; } _dcId = (String) params.get("zone"); if (_dcId == null) { _dcId = "default"; } _pod = (String) params.get("pod"); if (_pod == null) { _pod = "default"; } _clusterId = (String) params.get("cluster"); _modifyVlanPath = Script.findScript(networkScriptsDir, "modifyvlan.sh"); if (_modifyVlanPath == null) { throw new ConfigurationException("Unable to find modifyvlan.sh"); } _versionstringpath = Script.findScript(kvmScriptsDir, "versions.sh"); if (_versionstringpath == null) { throw new ConfigurationException("Unable to find versions.sh"); } _patchdomrPath = Script.findScript(kvmScriptsDir + "/patch/", "rundomrpre.sh"); if (_patchdomrPath == null) { throw new ConfigurationException("Unable to find rundomrpre.sh"); } _heartBeatPath = Script.findScript(kvmScriptsDir, "kvmheartbeat.sh"); if (_heartBeatPath == null) { throw new ConfigurationException("Unable to find kvmheartbeat.sh"); } _createvmPath = Script.findScript(storageScriptsDir, "createvm.sh"); if (_createvmPath == null) { throw new ConfigurationException("Unable to find the createvm.sh"); } _manageSnapshotPath = Script.findScript(storageScriptsDir, "managesnapshot.sh"); if (_manageSnapshotPath == null) { throw new ConfigurationException( "Unable to find the managesnapshot.sh"); } _resizeVolumePath = Script.findScript(storageScriptsDir, "resizevolume.sh"); if (_resizeVolumePath == null) { throw new ConfigurationException( "Unable to find the resizevolume.sh"); } _createTmplPath = Script .findScript(storageScriptsDir, "createtmplt.sh"); if (_createTmplPath == null) { throw new ConfigurationException( "Unable to find the createtmplt.sh"); } _securityGroupPath = Script.findScript(networkScriptsDir, "security_group.py"); if (_securityGroupPath == null) { throw new ConfigurationException( "Unable to find the security_group.py"); } _routerProxyPath = Script.findScript("scripts/network/domr/", "router_proxy.sh"); if (_routerProxyPath == null) { throw new ConfigurationException( "Unable to find the router_proxy.sh"); } String value = (String) params.get("developer"); boolean isDeveloper = Boolean.parseBoolean(value); if (isDeveloper) { params.putAll(getDeveloperProperties()); } _pool = (String) params.get("pool"); if (_pool == null) { _pool = "/root"; } String instance = (String) params.get("instance"); _hypervisorType = (String) params.get("hypervisor.type"); if (_hypervisorType == null) { _hypervisorType = "kvm"; } _hypervisorURI = (String) params.get("hypervisor.uri"); if (_hypervisorURI == null) { _hypervisorURI = "qemu:///system"; } String startMac = (String) 
params.get("private.macaddr.start"); if (startMac == null) { startMac = "00:16:3e:77:e2:a0"; } String startIp = (String) params.get("private.ipaddr.start"); if (startIp == null) { startIp = "192.168.166.128"; } _pingTestPath = Script.findScript(kvmScriptsDir, "pingtest.sh"); if (_pingTestPath == null) { throw new ConfigurationException("Unable to find the pingtest.sh"); } _linkLocalBridgeName = (String) params.get("private.bridge.name"); if (_linkLocalBridgeName == null) { if (isDeveloper) { _linkLocalBridgeName = "cloud-" + instance + "-0"; } else { _linkLocalBridgeName = "cloud0"; } } _publicBridgeName = (String) params.get("public.network.device"); if (_publicBridgeName == null) { _publicBridgeName = "cloudbr0"; } _privBridgeName = (String) params.get("private.network.device"); if (_privBridgeName == null) { _privBridgeName = "cloudbr1"; } _guestBridgeName = (String) params.get("guest.network.device"); if (_guestBridgeName == null) { _guestBridgeName = _privBridgeName; } _privNwName = (String) params.get("private.network.name"); if (_privNwName == null) { if (isDeveloper) { _privNwName = "cloud-" + instance + "-private"; } else { _privNwName = "cloud-private"; } } _localStoragePath = (String) params.get("local.storage.path"); if (_localStoragePath == null) { _localStoragePath = "/var/lib/libvirt/images/"; } _localStorageUUID = (String) params.get("local.storage.uuid"); if (_localStorageUUID == null) { throw new ConfigurationException("local.storage.uuid is not set! Please set this to a valid UUID"); } value = (String) params.get("scripts.timeout"); _timeout = NumbersUtil.parseInt(value, 30 * 60) * 1000; value = (String) params.get("stop.script.timeout"); _stopTimeout = NumbersUtil.parseInt(value, 120) * 1000; value = (String) params.get("cmds.timeout"); _cmdsTimeout = NumbersUtil.parseInt(value, 7200) * 1000; value = (String) params.get("host.reserved.mem.mb"); _dom0MinMem = NumbersUtil.parseInt(value, 0) * 1024 * 1024; LibvirtConnection.initialize(_hypervisorURI); Connect conn = null; try { conn = LibvirtConnection.getConnection(); if (_bridgeType == BridgeType.OPENVSWITCH) { if (conn.getLibVirVersion() < (9 * 1000 + 11)) { throw new ConfigurationException("LibVirt version 0.9.11 required for openvswitch support, but version " + conn.getLibVirVersion() + " detected"); } } } catch (LibvirtException e) { throw new CloudRuntimeException(e.getMessage()); } /* Does node support HVM guest? If not, exit */ if (!IsHVMEnabled(conn)) { throw new ConfigurationException( "NO HVM support on this machine, please make sure: " + "1. VT/SVM is supported by your CPU, or is enabled in BIOS. " + "2. 
kvm modules are loaded (kvm, kvm_amd|kvm_intel)");
        }

        _hypervisorPath = getHypervisorPath(conn);
        try {
            _hvVersion = conn.getVersion();
            _hvVersion = (_hvVersion % 1000000) / 1000;
        } catch (LibvirtException e) {
        }

        String[] info = NetUtils.getNetworkParams(_privateNic);

        _monitor = new KVMHAMonitor(null, info[0], _heartBeatPath);
        Thread ha = new Thread(_monitor);
        ha.start();

        _storagePoolMgr = new KVMStoragePoolManager(_storage, _monitor);

        _sysvmISOPath = (String) params.get("systemvm.iso.path");
        if (_sysvmISOPath == null) {
            String[] isoPaths = { "/usr/lib64/cloud/agent/vms/systemvm.iso",
                    "/usr/lib/cloud/agent/vms/systemvm.iso",
                    "/usr/lib64/cloud/common/vms/systemvm.iso",
                    "/usr/lib/cloud/common/vms/systemvm.iso" };
            for (String isoPath : isoPaths) {
                if (_storage.exists(isoPath)) {
                    _sysvmISOPath = isoPath;
                    break;
                }
            }
            if (_sysvmISOPath == null) {
                s_logger.debug("Can't find system vm ISO");
            }
        }

        switch (_bridgeType) {
        case OPENVSWITCH:
            getOvsPifs();
            break;
        case NATIVE:
        default:
            getPifs();
            break;
        }

        if (_pifs.get("private") == null) {
            s_logger.debug("Failed to get private nic name");
            throw new ConfigurationException("Failed to get private nic name");
        }

        if (_pifs.get("public") == null) {
            s_logger.debug("Failed to get public nic name");
            throw new ConfigurationException("Failed to get public nic name");
        }

        s_logger.debug("Found pif: " + _pifs.get("private") + " on " + _privBridgeName
                + ", pif: " + _pifs.get("public") + " on " + _publicBridgeName);

        _can_bridge_firewall = can_bridge_firewall(_pifs.get("public"));

        _localGateway = Script
                .runSimpleBashScript("ip route |grep default|awk '{print $3}'");
        if (_localGateway == null) {
            s_logger.debug("Failed to find the local gateway");
        }

        _mountPoint = (String) params.get("mount.path");
        if (_mountPoint == null) {
            _mountPoint = "/mnt";
        }

        value = (String) params.get("vm.migrate.speed");
        _migrateSpeed = NumbersUtil.parseInt(value, -1);
        if (_migrateSpeed == -1) {
            // get guest network device speed
            _migrateSpeed = 0;
            String speed = Script.runSimpleBashScript("ethtool " + _pifs.get("public")
                    + " |grep Speed | cut -d \\ -f 2");
            if (speed != null) {
                String[] tokens = speed.split("M");
                if (tokens.length == 2) {
                    try {
                        _migrateSpeed = Integer.parseInt(tokens[0]);
                    } catch (Exception e) {
                    }
                    s_logger.debug("device " + _pifs.get("public") + " has speed: "
                            + String.valueOf(_migrateSpeed));
                }
            }
            params.put("vm.migrate.speed", String.valueOf(_migrateSpeed));
        }

        Map<String, String> bridges = new HashMap<String, String>();
        bridges.put("linklocal", _linkLocalBridgeName);
        bridges.put("public", _publicBridgeName);
        bridges.put("private", _privBridgeName);
        bridges.put("guest", _guestBridgeName);
        params.put("libvirt.host.bridges", bridges);
        params.put("libvirt.host.pifs", _pifs);

        // Load the vif driver
        String vifDriverName = (String) params.get("libvirt.vif.driver");
        if (vifDriverName == null) {
            if (_bridgeType == BridgeType.OPENVSWITCH) {
                s_logger.info("No libvirt.vif.driver specified. Defaults to OvsVifDriver.");
                vifDriverName = "com.cloud.hypervisor.kvm.resource.OvsVifDriver";
            } else {
                s_logger.info("No libvirt.vif.driver specified. Defaults to BridgeVifDriver.");
                vifDriverName = "com.cloud.hypervisor.kvm.resource.BridgeVifDriver";
            }
        }

        params.put("libvirt.computing.resource", this);

        try {
            Class<?> clazz = Class.forName(vifDriverName);
            _vifDriver = (VifDriver) clazz.newInstance();
            _vifDriver.configure(params);
        } catch (ClassNotFoundException e) {
            throw new ConfigurationException("Unable to find class for libvirt.vif.driver " + e);
        } catch (InstantiationException e) {
            throw new ConfigurationException("Unable to instantiate class for libvirt.vif.driver " + e);
        } catch (Exception e) {
            throw new ConfigurationException("Failed to initialize libvirt.vif.driver " + e);
        }

        return true;
    }

    private void getPifs() {
        File dir = new File("/sys/devices/virtual/net");
        File[] netdevs = dir.listFiles();
        List<String> bridges = new ArrayList<String>();
        for (int i = 0; i < netdevs.length; i++) {
            File isbridge = new File(netdevs[i].getAbsolutePath() + "/bridge");
            String netdevName = netdevs[i].getName();
            s_logger.debug("looking in file " + netdevs[i].getAbsolutePath() + "/bridge");
            if (isbridge.exists()) {
                s_logger.debug("Found bridge " + netdevName);
                bridges.add(netdevName);
            }
        }

        for (String bridge : bridges) {
            s_logger.debug("looking for pif for bridge " + bridge);
            String pif = getPif(bridge);
            if (_publicBridgeName != null && bridge.equals(_publicBridgeName)) {
                _pifs.put("public", pif);
            }
            if (_guestBridgeName != null && bridge.equals(_guestBridgeName)) {
                _pifs.put("private", pif);
            }
            _pifs.put(bridge, pif);
        }
        s_logger.debug("done looking for pifs, no more bridges");
    }

    private void getOvsPifs() {
        String cmdout = Script.runSimpleBashScript("ovs-vsctl list-br | sed '{:q;N;s/\\n/%/g;t q}'");
        s_logger.debug("cmdout was " + cmdout);
        List<String> bridges = Arrays.asList(cmdout.split("%"));
        for (String bridge : bridges) {
            s_logger.debug("looking for pif for bridge " + bridge);
            // String pif = getOvsPif(bridge);
            // Not really interested in the pif name at this point for ovs
            // bridges
            String pif = bridge;
            if (_publicBridgeName != null && bridge.equals(_publicBridgeName)) {
                _pifs.put("public", pif);
            }
            if (_guestBridgeName != null && bridge.equals(_guestBridgeName)) {
                _pifs.put("private", pif);
            }
            _pifs.put(bridge, pif);
        }
        s_logger.debug("done looking for pifs, no more bridges");
    }

    private String getPif(String bridge) {
        String pif = matchPifFileInDirectory(bridge);
        // if the enslaved interface is a vlan device (e.g. eth0.100),
        // resolve the underlying physical device via /proc/net/vlan
        File vlanfile = new File("/proc/net/vlan/" + pif);

        if (vlanfile.isFile()) {
            pif = Script.runSimpleBashScript("grep ^Device\\: /proc/net/vlan/" + pif
                    + " | awk {'print $2'}");
        }

        return pif;
    }

    private String getOvsPif(String bridge) {
        String pif = Script.runSimpleBashScript("ovs-vsctl list-ports " + bridge);
        return pif;
    }

    private String matchPifFileInDirectory(String bridgeName) {
        File f = new File("/sys/devices/virtual/net/" + bridgeName + "/brif");

        if (!
f.isDirectory()){ s_logger.debug("failing to get physical interface from bridge" + bridgeName + ", does " + f.getAbsolutePath() + "exist?"); return ""; } File[] interfaces = f.listFiles(); for (int i = 0; i < interfaces.length; i++) { String fname = interfaces[i].getName(); s_logger.debug("matchPifFileInDirectory: file name '"+fname+"'"); if (fname.startsWith("eth") || fname.startsWith("bond") || fname.startsWith("vlan") || fname.startsWith("em")) { return fname; } } s_logger.debug("failing to get physical interface from bridge" + bridgeName + ", did not find an eth*, bond*, or vlan* in " + f.getAbsolutePath()); return ""; } private boolean checkNetwork(String networkName) { if (networkName == null) { return true; } if (_bridgeType == BridgeType.OPENVSWITCH) { return checkOvsNetwork(networkName); } else { return checkBridgeNetwork(networkName); } } private boolean checkBridgeNetwork(String networkName) { if (networkName == null) { return true; } String name = matchPifFileInDirectory(networkName); if (name == null || name.isEmpty()) { return false; } else { return true; } } private boolean checkOvsNetwork(String networkName) { s_logger.debug("Checking if network " + networkName + " exists as openvswitch bridge"); if (networkName == null) { return true; } Script command = new Script("/bin/sh", _timeout); command.add("-c"); command.add("ovs-vsctl br-exists " + networkName); String result = command.execute(null); if ("Ok".equals(result)) { return true; } else { return false; } } private String getVnetId(String vnetId) { return vnetId; } private void patchSystemVm(String cmdLine, String dataDiskPath, String vmName) throws InternalErrorException { String result; final Script command = new Script(_patchdomrPath, _timeout, s_logger); command.add("-l", vmName); command.add("-t", "all"); command.add("-d", dataDiskPath); command.add("-p", cmdLine.replaceAll(" ", "%")); result = command.execute(); if (result != null) { throw new InternalErrorException(result); } } boolean isDirectAttachedNetwork(String type) { if ("untagged".equalsIgnoreCase(type)) { return true; } else { try { Long.valueOf(type); } catch (NumberFormatException e) { return true; } return false; } } protected String startDomain(Connect conn, String vmName, String domainXML) throws LibvirtException, InternalErrorException { /* No duplicated vm, we will success, or failed */ boolean failed = false; Domain dm = null; try { dm = conn.domainDefineXML(domainXML); } catch (final LibvirtException e) { /* Duplicated defined vm */ s_logger.warn("Failed to define domain " + vmName + ": " + e.getMessage()); failed = true; } finally { try { if (dm != null) { dm.free(); } } catch (final LibvirtException e) { } } /* If failed, undefine the vm */ Domain dmOld = null; Domain dmNew = null; try { if (failed) { dmOld = conn.domainLookupByUUID(UUID.nameUUIDFromBytes(vmName .getBytes())); dmOld.undefine(); dmNew = conn.domainDefineXML(domainXML); } } catch (final LibvirtException e) { s_logger.warn("Failed to define domain (second time) " + vmName + ": " + e.getMessage()); throw e; } catch (Exception e) { s_logger.warn("Failed to define domain (second time) " + vmName + ": " + e.getMessage()); throw new InternalErrorException(e.toString()); } finally { try { if (dmOld != null) { dmOld.free(); } if (dmNew != null) { dmNew.free(); } } catch (final LibvirtException e) { } } /* Start the VM */ try { dm = conn.domainLookupByUUID(UUID.nameUUIDFromBytes(vmName .getBytes())); dm.create(); } catch (LibvirtException e) { s_logger.warn("Failed to start domain: " + 
vmName + ": " + e.getMessage()); throw e; } finally { try { if (dm != null) { dm.free(); } } catch (final LibvirtException e) { } } return null; } @Override public boolean stop() { try { Connect conn = LibvirtConnection.getConnection(); conn.close(); } catch (LibvirtException e) { } return true; } @Override public Answer executeRequest(Command cmd) { try { if (cmd instanceof StopCommand) { return execute((StopCommand) cmd); } else if (cmd instanceof GetVmStatsCommand) { return execute((GetVmStatsCommand) cmd); } else if (cmd instanceof RebootRouterCommand) { return execute((RebootRouterCommand) cmd); } else if (cmd instanceof RebootCommand) { return execute((RebootCommand) cmd); } else if (cmd instanceof GetHostStatsCommand) { return execute((GetHostStatsCommand) cmd); } else if (cmd instanceof CheckStateCommand) { return executeRequest(cmd); } else if (cmd instanceof CheckHealthCommand) { return execute((CheckHealthCommand) cmd); } else if (cmd instanceof PrepareForMigrationCommand) { return execute((PrepareForMigrationCommand) cmd); } else if (cmd instanceof MigrateCommand) { return execute((MigrateCommand) cmd); } else if (cmd instanceof PingTestCommand) { return execute((PingTestCommand) cmd); } else if (cmd instanceof CheckVirtualMachineCommand) { return execute((CheckVirtualMachineCommand) cmd); } else if (cmd instanceof ReadyCommand) { return execute((ReadyCommand) cmd); } else if (cmd instanceof AttachIsoCommand) { return execute((AttachIsoCommand) cmd); } else if (cmd instanceof AttachVolumeCommand) { return execute((AttachVolumeCommand) cmd); } else if (cmd instanceof StopCommand) { return execute((StopCommand) cmd); } else if (cmd instanceof CheckConsoleProxyLoadCommand) { return execute((CheckConsoleProxyLoadCommand) cmd); } else if (cmd instanceof WatchConsoleProxyLoadCommand) { return execute((WatchConsoleProxyLoadCommand) cmd); } else if (cmd instanceof GetVncPortCommand) { return execute((GetVncPortCommand) cmd); } else if (cmd instanceof ModifySshKeysCommand) { return execute((ModifySshKeysCommand) cmd); } else if (cmd instanceof MaintainCommand) { return execute((MaintainCommand) cmd); } else if (cmd instanceof CreateCommand) { return execute((CreateCommand) cmd); } else if (cmd instanceof DestroyCommand) { return execute((DestroyCommand) cmd); } else if (cmd instanceof PrimaryStorageDownloadCommand) { return execute((PrimaryStorageDownloadCommand) cmd); } else if (cmd instanceof CreatePrivateTemplateFromVolumeCommand) { return execute((CreatePrivateTemplateFromVolumeCommand) cmd); } else if (cmd instanceof GetStorageStatsCommand) { return execute((GetStorageStatsCommand) cmd); } else if (cmd instanceof ManageSnapshotCommand) { return execute((ManageSnapshotCommand) cmd); } else if (cmd instanceof BackupSnapshotCommand) { return execute((BackupSnapshotCommand) cmd); } else if (cmd instanceof CreateVolumeFromSnapshotCommand) { return execute((CreateVolumeFromSnapshotCommand) cmd); } else if (cmd instanceof CreatePrivateTemplateFromSnapshotCommand) { return execute((CreatePrivateTemplateFromSnapshotCommand) cmd); } else if (cmd instanceof UpgradeSnapshotCommand) { return execute((UpgradeSnapshotCommand) cmd); } else if (cmd instanceof CreateStoragePoolCommand) { return execute((CreateStoragePoolCommand) cmd); } else if (cmd instanceof ModifyStoragePoolCommand) { return execute((ModifyStoragePoolCommand) cmd); } else if (cmd instanceof SecurityGroupRulesCmd) { return execute((SecurityGroupRulesCmd) cmd); } else if (cmd instanceof DeleteStoragePoolCommand) { return 
execute((DeleteStoragePoolCommand) cmd); } else if (cmd instanceof FenceCommand) { return execute((FenceCommand) cmd); } else if (cmd instanceof StartCommand) { return execute((StartCommand) cmd); } else if (cmd instanceof PlugNicCommand) { return execute((PlugNicCommand) cmd); } else if (cmd instanceof UnPlugNicCommand) { return execute((UnPlugNicCommand) cmd); } else if (cmd instanceof SetupGuestNetworkCommand) { return execute((SetupGuestNetworkCommand) cmd); } else if (cmd instanceof SetNetworkACLCommand) { return execute((SetNetworkACLCommand) cmd); } else if (cmd instanceof SetSourceNatCommand) { return execute((SetSourceNatCommand) cmd); } else if (cmd instanceof IpAssocVpcCommand) { return execute((IpAssocVpcCommand) cmd); } else if (cmd instanceof IpAssocCommand) { return execute((IpAssocCommand) cmd); } else if (cmd instanceof NetworkElementCommand) { return _virtRouterResource.executeRequest(cmd); } else if (cmd instanceof CheckSshCommand) { return execute((CheckSshCommand) cmd); } else if (cmd instanceof NetworkUsageCommand) { return execute((NetworkUsageCommand) cmd); } else if (cmd instanceof NetworkRulesSystemVmCommand) { return execute((NetworkRulesSystemVmCommand) cmd); } else if (cmd instanceof CleanupNetworkRulesCmd) { return execute((CleanupNetworkRulesCmd) cmd); } else if (cmd instanceof CopyVolumeCommand) { return execute((CopyVolumeCommand) cmd); } else if (cmd instanceof ResizeVolumeCommand) { return execute((ResizeVolumeCommand) cmd); } else if (cmd instanceof CheckNetworkCommand) { return execute((CheckNetworkCommand) cmd); } else { s_logger.warn("Unsupported command "); return Answer.createUnsupportedCommandAnswer(cmd); } } catch (final IllegalArgumentException e) { return new Answer(cmd, false, e.getMessage()); } } private CheckNetworkAnswer execute(CheckNetworkCommand cmd) { List<PhysicalNetworkSetupInfo> phyNics = cmd .getPhysicalNetworkInfoList(); String errMsg = null; for (PhysicalNetworkSetupInfo nic : phyNics) { if (!checkNetwork(nic.getGuestNetworkName())) { errMsg = "Can not find network: " + nic.getGuestNetworkName(); break; } else if (!checkNetwork(nic.getPrivateNetworkName())) { errMsg = "Can not find network: " + nic.getPrivateNetworkName(); break; } else if (!checkNetwork(nic.getPublicNetworkName())) { errMsg = "Can not find network: " + nic.getPublicNetworkName(); break; } } if (errMsg != null) { return new CheckNetworkAnswer(cmd, false, errMsg); } else { return new CheckNetworkAnswer(cmd, true, null); } } private CopyVolumeAnswer execute(CopyVolumeCommand cmd) { boolean copyToSecondary = cmd.toSecondaryStorage(); String volumePath = cmd.getVolumePath(); StorageFilerTO pool = cmd.getPool(); String secondaryStorageUrl = cmd.getSecondaryStorageURL(); KVMStoragePool secondaryStoragePool = null; try { KVMStoragePool primaryPool = _storagePoolMgr.getStoragePool( pool.getType(), pool.getUuid()); String volumeName = UUID.randomUUID().toString(); if (copyToSecondary) { String destVolumeName = volumeName + ".qcow2"; KVMPhysicalDisk volume = primaryPool.getPhysicalDisk(cmd .getVolumePath()); String volumeDestPath = "/volumes/" + cmd.getVolumeId() + File.separator; secondaryStoragePool = _storagePoolMgr.getStoragePoolByURI( secondaryStorageUrl); secondaryStoragePool.createFolder(volumeDestPath); secondaryStoragePool.delete(); secondaryStoragePool = _storagePoolMgr.getStoragePoolByURI( secondaryStorageUrl + volumeDestPath); _storagePoolMgr.copyPhysicalDisk(volume, destVolumeName,secondaryStoragePool); return new CopyVolumeAnswer(cmd, true, null, null, 
volumeName);
            } else {
                volumePath = "/volumes/" + cmd.getVolumeId() + File.separator;
                secondaryStoragePool = _storagePoolMgr.getStoragePoolByURI(
                        secondaryStorageUrl + volumePath);
                KVMPhysicalDisk volume = secondaryStoragePool
                        .getPhysicalDisk(cmd.getVolumePath() + ".qcow2");
                _storagePoolMgr.copyPhysicalDisk(volume, volumeName, primaryPool);
                return new CopyVolumeAnswer(cmd, true, null, null, volumeName);
            }
        } catch (CloudRuntimeException e) {
            return new CopyVolumeAnswer(cmd, false, e.toString(), null, null);
        } finally {
            if (secondaryStoragePool != null) {
                secondaryStoragePool.delete();
            }
        }
    }

    protected Answer execute(DeleteStoragePoolCommand cmd) {
        try {
            _storagePoolMgr.deleteStoragePool(cmd.getPool().getType(),
                    cmd.getPool().getUuid());
            return new Answer(cmd);
        } catch (CloudRuntimeException e) {
            return new Answer(cmd, false, e.toString());
        }
    }

    protected FenceAnswer execute(FenceCommand cmd) {
        ExecutorService executors = Executors.newSingleThreadExecutor();
        List<NfsStoragePool> pools = _monitor.getStoragePools();
        KVMHAChecker ha = new KVMHAChecker(pools, cmd.getHostIp());
        Future<Boolean> future = executors.submit(ha);
        try {
            Boolean result = future.get();
            if (result) {
                return new FenceAnswer(cmd, false, "Heart is still beating...");
            } else {
                return new FenceAnswer(cmd);
            }
        } catch (InterruptedException e) {
            s_logger.warn("Unable to fence", e);
            return new FenceAnswer(cmd, false, e.getMessage());
        } catch (ExecutionException e) {
            s_logger.warn("Unable to fence", e);
            return new FenceAnswer(cmd, false, e.getMessage());
        }
    }

    protected Storage.StorageResourceType getStorageResourceType() {
        return Storage.StorageResourceType.STORAGE_POOL;
    }

    protected Answer execute(CreateCommand cmd) {
        StorageFilerTO pool = cmd.getPool();
        DiskProfile dskch = cmd.getDiskCharacteristics();
        KVMPhysicalDisk BaseVol = null;
        KVMStoragePool primaryPool = null;
        KVMPhysicalDisk vol = null;
        long disksize;
        try {
            primaryPool = _storagePoolMgr.getStoragePool(pool.getType(),
                    pool.getUuid());
            disksize = dskch.getSize();

            if (cmd.getTemplateUrl() != null) {
                if (primaryPool.getType() == StoragePoolType.CLVM) {
                    vol = templateToPrimaryDownload(cmd.getTemplateUrl(), primaryPool);
                } else {
                    BaseVol = primaryPool.getPhysicalDisk(cmd.getTemplateUrl());
                    vol = _storagePoolMgr.createDiskFromTemplate(BaseVol,
                            UUID.randomUUID().toString(), primaryPool);
                }
                if (vol == null) {
                    return new Answer(cmd, false,
                            " Can't create storage volume on storage pool");
                }
            } else {
                vol = primaryPool.createPhysicalDisk(UUID.randomUUID().toString(),
                        dskch.getSize());
            }
            VolumeTO volume = new VolumeTO(cmd.getVolumeId(), dskch.getType(),
                    pool.getType(), pool.getUuid(), pool.getPath(),
                    vol.getName(), vol.getName(), disksize, null);
            return new CreateAnswer(cmd, volume);
        } catch (CloudRuntimeException e) {
            s_logger.debug("Failed to create volume: " + e.toString());
            return new CreateAnswer(cmd, e);
        }
    }

    // this is much like PrimaryStorageDownloadCommand, but keeping it separate
    protected KVMPhysicalDisk templateToPrimaryDownload(String templateUrl,
            KVMStoragePool primaryPool) {
        int index = templateUrl.lastIndexOf("/");
        String mountpoint = templateUrl.substring(0, index);
        String templateName = null;
        if (index < templateUrl.length() - 1) {
            templateName = templateUrl.substring(index + 1);
        }

        KVMPhysicalDisk templateVol = null;
        KVMStoragePool secondaryPool = null;
        try {
            secondaryPool = _storagePoolMgr.getStoragePoolByURI(mountpoint);
            /* Get template vol */
            if (templateName == null) {
                secondaryPool.refresh();
                List<KVMPhysicalDisk> disks = secondaryPool.listPhysicalDisks();
                if (disks
== null || disks.isEmpty()) { s_logger.error("Failed to get volumes from pool: " + secondaryPool.getUuid()); return null; } for (KVMPhysicalDisk disk : disks) { if (disk.getName().endsWith("qcow2")) { templateVol = disk; break; } } if (templateVol == null) { s_logger.error("Failed to get template from pool: " + secondaryPool.getUuid()); return null; } } else { templateVol = secondaryPool.getPhysicalDisk(templateName); } /* Copy volume to primary storage */ KVMPhysicalDisk primaryVol = _storagePoolMgr.copyPhysicalDisk(templateVol, UUID.randomUUID().toString(), primaryPool); return primaryVol; } catch (CloudRuntimeException e) { s_logger.error("Failed to download template to primary storage",e); return null; } finally { if (secondaryPool != null) { secondaryPool.delete(); } } } private String getResizeScriptType (KVMStoragePool pool, KVMPhysicalDisk vol) { StoragePoolType poolType = pool.getType(); PhysicalDiskFormat volFormat = vol.getFormat(); if(pool.getType() == StoragePoolType.CLVM && volFormat == KVMPhysicalDisk.PhysicalDiskFormat.RAW) { return "CLVM"; } else if ((poolType == StoragePoolType.NetworkFilesystem || poolType == StoragePoolType.SharedMountPoint || poolType == StoragePoolType.Filesystem) && volFormat == KVMPhysicalDisk.PhysicalDiskFormat.QCOW2 ) { return "QCOW2"; } return null; } /* uses a local script now, eventually support for virStorageVolResize() will maybe work on qcow2 and lvm and we can do this in libvirt calls */ public Answer execute(ResizeVolumeCommand cmd) { String volid = cmd.getPath(); long newSize = cmd.getNewSize(); long currentSize = cmd.getCurrentSize(); String vmInstanceName = cmd.getInstanceName(); boolean shrinkOk = cmd.getShrinkOk(); StorageFilerTO spool = cmd.getPool(); try { KVMStoragePool pool = _storagePoolMgr.getStoragePool(spool.getType(), spool.getUuid()); KVMPhysicalDisk vol = pool.getPhysicalDisk(volid); String path = vol.getPath(); String type = getResizeScriptType(pool, vol); if (type == null) { return new ResizeVolumeAnswer(cmd, false, "Unsupported volume format: pool type '" + pool.getType() + "' and volume format '" + vol.getFormat() + "'"); } else if (type.equals("QCOW2") && shrinkOk) { return new ResizeVolumeAnswer(cmd, false, "Unable to shrink volumes of type " + type); } s_logger.debug("got to the stage where we execute the volume resize, params:" + path + "," + currentSize + "," + newSize + "," + type + "," + vmInstanceName + "," + shrinkOk); final Script resizecmd = new Script(_resizeVolumePath, _cmdsTimeout, s_logger); resizecmd.add("-s",String.valueOf(newSize)); resizecmd.add("-c",String.valueOf(currentSize)); resizecmd.add("-p",path); resizecmd.add("-t",type); resizecmd.add("-r",String.valueOf(shrinkOk)); resizecmd.add("-v",vmInstanceName); String result = resizecmd.execute(); if (result == null) { /* fetch new size as seen from libvirt, don't want to assume anything */ pool = _storagePoolMgr.getStoragePool(spool.getType(), spool.getUuid()); long finalSize = pool.getPhysicalDisk(volid).getVirtualSize(); s_logger.debug("after resize, size reports as " + finalSize + ", requested " + newSize); return new ResizeVolumeAnswer(cmd, true, "success", finalSize); } return new ResizeVolumeAnswer(cmd, false, result); } catch (CloudRuntimeException e) { String error = "failed to resize volume: " + e; s_logger.debug(error); return new ResizeVolumeAnswer(cmd, false, error); } } public Answer execute(DestroyCommand cmd) { VolumeTO vol = cmd.getVolume(); try { KVMStoragePool pool = _storagePoolMgr.getStoragePool( vol.getPoolType(), 
vol.getPoolUuid()); pool.deletePhysicalDisk(vol.getPath()); String vmName = cmd.getVmName(); String poolPath = pool.getLocalPath(); /* if vol is a root disk for a system vm, try to remove accompanying patch disk as well this is a bit tricky since the patchdisk is only a LibvirtComputingResource construct and not tracked anywhere in cloudstack */ if (vol.getType() == Volume.Type.ROOT && vmName.matches("^[rsv]-\\d+-.+$")) { File patchVbd = new File(poolPath + File.separator + vmName + "-patchdisk"); if(patchVbd.exists()){ try { _storagePoolMgr.deleteVbdByPath(vol.getPoolType(),patchVbd.getAbsolutePath()); } catch(CloudRuntimeException e) { s_logger.warn("unable to destroy patch disk '" + patchVbd.getAbsolutePath() + "' while removing root disk for " + vmName + " : " + e); } } else { s_logger.debug("file '" +patchVbd.getAbsolutePath()+ "' not found"); } } return new Answer(cmd, true, "Success"); } catch (CloudRuntimeException e) { s_logger.debug("Failed to delete volume: " + e.toString()); return new Answer(cmd, false, e.toString()); } } private String getVlanIdFromBridge(String brName) { String pif= matchPifFileInDirectory(brName); String[] pifparts = pif.split("\\."); if(pifparts.length == 2) { return pifparts[1]; } else { s_logger.debug("failed to get vlan id from bridge " + brName + "attached to physical interface" + pif); return ""; } } private void VifHotPlug(Connect conn, String vmName, String vlanId, String macAddr) throws InternalErrorException, LibvirtException { NicTO nicTO = new NicTO(); nicTO.setMac(macAddr); nicTO.setType(TrafficType.Public); if (vlanId == null) { nicTO.setBroadcastType(BroadcastDomainType.Native); } else { nicTO.setBroadcastType(BroadcastDomainType.Vlan); nicTO.setBroadcastUri(BroadcastDomainType.Vlan.toUri(vlanId)); } Domain vm = getDomain(conn, vmName); vm.attachDevice(_vifDriver.plug(nicTO, "Other PV (32-bit)").toString()); } private PlugNicAnswer execute(PlugNicCommand cmd) { Connect conn; NicTO nic = cmd.getNic(); String vmName = cmd.getVmName(); try { conn = LibvirtConnection.getConnection(); Domain vm = getDomain(conn, vmName); List<InterfaceDef> pluggedNics = getInterfaces(conn, vmName); Integer nicnum = 0; for (InterfaceDef pluggedNic : pluggedNics) { if (pluggedNic.getMacAddress().equalsIgnoreCase(nic.getMac())) { s_logger.debug("found existing nic for mac "+ pluggedNic.getMacAddress() + " at index "+nicnum); return new PlugNicAnswer(cmd, true, "success"); } nicnum++; } vm.attachDevice(_vifDriver.plug(nic, "Other PV (32-bit)").toString()); return new PlugNicAnswer(cmd, true, "success"); } catch (Exception e) { String msg = " Plug Nic failed due to " + e.toString(); s_logger.warn(msg, e); return new PlugNicAnswer(cmd, false, msg); } } private UnPlugNicAnswer execute(UnPlugNicCommand cmd) { Connect conn; NicTO nic = cmd.getNic(); String vmName = cmd.getInstanceName(); try { conn = LibvirtConnection.getConnection(); Domain vm = getDomain(conn, vmName); List<InterfaceDef> pluggedNics = getInterfaces(conn, vmName); for (InterfaceDef pluggedNic : pluggedNics) { if (pluggedNic.getMacAddress().equalsIgnoreCase(nic.getMac())) { vm.detachDevice(pluggedNic.toString()); return new UnPlugNicAnswer(cmd, true, "success"); } } return new UnPlugNicAnswer(cmd, true, "success"); } catch (Exception e) { String msg = " Unplug Nic failed due to " + e.toString(); s_logger.warn(msg, e); return new UnPlugNicAnswer(cmd, false, msg); } } private SetupGuestNetworkAnswer execute(SetupGuestNetworkCommand cmd) { Connect conn; NicTO nic = cmd.getNic(); String routerIP = 
cmd.getAccessDetail(NetworkElementCommand.ROUTER_IP); String routerGIP = cmd.getAccessDetail(NetworkElementCommand.ROUTER_GUEST_IP); String routerName = cmd.getAccessDetail(NetworkElementCommand.ROUTER_NAME); String gateway = cmd.getAccessDetail(NetworkElementCommand.GUEST_NETWORK_GATEWAY); String cidr = Long.toString(NetUtils.getCidrSize(nic.getNetmask()));; String domainName = cmd.getNetworkDomain(); String dns = cmd.getDefaultDns1(); if (dns == null || dns.isEmpty()) { dns = cmd.getDefaultDns2(); } else { String dns2= cmd.getDefaultDns2(); if ( dns2 != null && !dns2.isEmpty()) { dns += "," + dns2; } } try { conn = LibvirtConnection.getConnection(); Domain vm = getDomain(conn, routerName); List<InterfaceDef> pluggedNics = getInterfaces(conn, routerName); InterfaceDef routerNic = null; for (InterfaceDef pluggedNic : pluggedNics) { if (pluggedNic.getMacAddress().equalsIgnoreCase(nic.getMac())) { routerNic = pluggedNic; break; } } if ( routerNic == null ) { return new SetupGuestNetworkAnswer(cmd, false, "Can not find nic with mac " + nic.getMac() + " for VM " + routerName); } String args = "vpc_guestnw.sh " + routerIP + " -C"; String dev = "eth" + nic.getDeviceId(); String netmask = NetUtils.getSubNet(routerGIP, nic.getNetmask()); String result = _virtRouterResource.assignGuestNetwork(dev, routerIP, routerGIP, gateway, cidr, netmask, dns, domainName ); if (result != null) { return new SetupGuestNetworkAnswer(cmd, false, "Creating guest network failed due to " + result); } return new SetupGuestNetworkAnswer(cmd, true, "success"); } catch (Exception e) { String msg = "Creating guest network failed due to " + e.toString(); s_logger.warn(msg, e); return new SetupGuestNetworkAnswer(cmd, false, msg); } } private SetNetworkACLAnswer execute(SetNetworkACLCommand cmd) { String[] results = new String[cmd.getRules().length]; String callResult; Connect conn; String routerName = cmd.getAccessDetail(NetworkElementCommand.ROUTER_NAME); String routerIp = cmd.getAccessDetail(NetworkElementCommand.ROUTER_IP); try { conn = LibvirtConnection.getConnection(); Domain vm = getDomain(conn, routerName); String [][] rules = cmd.generateFwRules(); String[] aclRules = rules[0]; NicTO nic = cmd.getNic(); String dev = "eth" + nic.getDeviceId(); String netmask = Long.toString(NetUtils.getCidrSize(nic.getNetmask())); StringBuilder sb = new StringBuilder(); for (int i = 0; i < aclRules.length; i++) { sb.append(aclRules[i]).append(','); } String rule = sb.toString(); String result = _virtRouterResource.assignNetworkACL(routerIp, dev, nic.getIp(), netmask, rule); if (result != null) { for (int i=0; i < results.length; i++) { results[i] = "Failed"; } return new SetNetworkACLAnswer(cmd, false, results); } return new SetNetworkACLAnswer(cmd, true, results); } catch (Exception e) { String msg = "SetNetworkACL failed due to " + e.toString(); s_logger.error(msg, e); return new SetNetworkACLAnswer(cmd, false, results); } } protected SetSourceNatAnswer execute(SetSourceNatCommand cmd) { Connect conn; String routerName = cmd.getAccessDetail(NetworkElementCommand.ROUTER_NAME); String routerIP = cmd.getAccessDetail(NetworkElementCommand.ROUTER_IP); IpAddressTO pubIP = cmd.getIpAddress(); try { conn = LibvirtConnection.getConnection(); Domain vm = getDomain(conn, routerName); Integer devNum = 0; String pubVlan = pubIP.getVlanId(); List<InterfaceDef> pluggedNics = getInterfaces(conn, routerName); for (InterfaceDef pluggedNic : pluggedNics) { String pluggedVlanBr = pluggedNic.getBrName(); String pluggedVlanId = 
getVlanIdFromBridge(pluggedVlanBr); if (pubVlan.equalsIgnoreCase(Vlan.UNTAGGED) && pluggedVlanBr.equalsIgnoreCase(_publicBridgeName)) { break; } else if (pluggedVlanBr.equalsIgnoreCase(_linkLocalBridgeName)){ /*skip over, no physical bridge device exists*/ } else if (pluggedVlanId == null) { /*this should only be true in the case of link local bridge*/ return new SetSourceNatAnswer(cmd, false, "unable to find the vlan id for bridge "+pluggedVlanBr+ " when attempting to set up" + pubVlan + " on router " + routerName); } else if (pluggedVlanId.equals(pubVlan)) { break; } devNum++; } String dev = "eth" + devNum; String result = _virtRouterResource.assignSourceNat(routerIP, pubIP.getPublicIp(), dev); if (result != null) { return new SetSourceNatAnswer(cmd, false, "KVM plugin \"vpc_snat\" failed:"+result); } return new SetSourceNatAnswer(cmd, true, "success"); } catch (Exception e) { String msg = "Ip SNAT failure due to " + e.toString(); s_logger.error(msg, e); return new SetSourceNatAnswer(cmd, false, msg); } } protected IpAssocAnswer execute(IpAssocVpcCommand cmd) { Connect conn; String[] results = new String[cmd.getIpAddresses().length]; int i = 0; String routerName = cmd.getAccessDetail(NetworkElementCommand.ROUTER_NAME); String routerIP = cmd.getAccessDetail(NetworkElementCommand.ROUTER_IP); try { conn = LibvirtConnection.getConnection(); IpAddressTO[] ips = cmd.getIpAddresses(); Domain vm = getDomain(conn, routerName); Integer devNum = 0; Map<String, Integer> vlanToNicNum = new HashMap<String, Integer>(); List<InterfaceDef> pluggedNics = getInterfaces(conn, routerName); for (InterfaceDef pluggedNic : pluggedNics) { String pluggedVlan = pluggedNic.getBrName(); if (pluggedVlan.equalsIgnoreCase(_linkLocalBridgeName)) { vlanToNicNum.put("LinkLocal",devNum); } else if (pluggedVlan.equalsIgnoreCase(_publicBridgeName) || pluggedVlan.equalsIgnoreCase(_privBridgeName) || pluggedVlan.equalsIgnoreCase(_guestBridgeName)) { vlanToNicNum.put(Vlan.UNTAGGED,devNum); } else { vlanToNicNum.put(getVlanIdFromBridge(pluggedVlan),devNum); } devNum++; } for (IpAddressTO ip : ips) { String nicName = "eth" + vlanToNicNum.get(ip.getVlanId()); String netmask = Long.toString(NetUtils.getCidrSize(ip.getVlanNetmask())); String subnet = NetUtils.getSubNet(ip.getPublicIp(), ip.getVlanNetmask()); _virtRouterResource.assignVpcIpToRouter(routerIP, ip.isAdd(), ip.getPublicIp(), nicName, ip.getVlanGateway(), netmask, subnet); results[i++] = ip.getPublicIp() + " - success"; } } catch (Exception e) { s_logger.error("Ip Assoc failure on applying one ip due to exception: ", e); results[i++] = IpAssocAnswer.errorResult; } return new IpAssocAnswer(cmd, results); } public Answer execute(IpAssocCommand cmd) { String routerName = cmd .getAccessDetail(NetworkElementCommand.ROUTER_NAME); String routerIp = cmd.getAccessDetail(NetworkElementCommand.ROUTER_IP); String[] results = new String[cmd.getIpAddresses().length]; Connect conn; try { conn = LibvirtConnection.getConnection(); List<InterfaceDef> nics = getInterfaces(conn, routerName); Map<String, Integer> vlanAllocatedToVM = new HashMap<String, Integer>(); Integer nicPos = 0; for (InterfaceDef nic : nics) { if (nic.getBrName().equalsIgnoreCase(_linkLocalBridgeName)) { vlanAllocatedToVM.put("LinkLocal", nicPos); } else { if (nic.getBrName().equalsIgnoreCase(_publicBridgeName) || nic.getBrName().equalsIgnoreCase(_privBridgeName) || nic.getBrName().equalsIgnoreCase(_guestBridgeName)) { vlanAllocatedToVM.put(Vlan.UNTAGGED, nicPos); } else { String vlanId = 
getVlanIdFromBridge(nic.getBrName()); vlanAllocatedToVM.put(vlanId, nicPos); } } nicPos++; } IpAddressTO[] ips = cmd.getIpAddresses(); int i = 0; String result = null; int nicNum = 0; for (IpAddressTO ip : ips) { if (!vlanAllocatedToVM.containsKey(ip.getVlanId())) { /* plug a vif into router */ VifHotPlug(conn, routerName, ip.getVlanId(), ip.getVifMacAddress()); vlanAllocatedToVM.put(ip.getVlanId(), nicPos++); } nicNum = vlanAllocatedToVM.get(ip.getVlanId()); networkUsage(routerIp, "addVif", "eth" + nicNum); result = _virtRouterResource.assignPublicIpAddress(routerName, routerIp, ip.getPublicIp(), ip.isAdd(), ip.isFirstIP(), ip.isSourceNat(), ip.getVlanId(), ip.getVlanGateway(), ip.getVlanNetmask(), ip.getVifMacAddress(), nicNum); if (result != null) { results[i++] = IpAssocAnswer.errorResult; } else { results[i++] = ip.getPublicIp() + " - success"; ; } } return new IpAssocAnswer(cmd, results); } catch (LibvirtException e) { return new IpAssocAnswer(cmd, results); } catch (InternalErrorException e) { return new IpAssocAnswer(cmd, results); } } protected ManageSnapshotAnswer execute(final ManageSnapshotCommand cmd) { String snapshotName = cmd.getSnapshotName(); String snapshotPath = cmd.getSnapshotPath(); String vmName = cmd.getVmName(); try { Connect conn = LibvirtConnection.getConnection(); DomainInfo.DomainState state = null; Domain vm = null; if (vmName != null) { try { vm = getDomain(conn, cmd.getVmName()); state = vm.getInfo().state; } catch (LibvirtException e) { } } KVMStoragePool primaryPool = _storagePoolMgr.getStoragePool( cmd.getPool().getType(), cmd.getPool().getUuid()); if (primaryPool.getType() == StoragePoolType.RBD) { s_logger.debug("Snapshots are not supported on RBD volumes"); return new ManageSnapshotAnswer(cmd, false, "Snapshots are not supported on RBD volumes"); } KVMPhysicalDisk disk = primaryPool.getPhysicalDisk(cmd .getVolumePath()); if (state == DomainInfo.DomainState.VIR_DOMAIN_RUNNING && !primaryPool.isExternalSnapshot()) { String vmUuid = vm.getUUIDString(); Object[] args = new Object[] { snapshotName, vmUuid }; String snapshot = SnapshotXML.format(args); s_logger.debug(snapshot); if (cmd.getCommandSwitch().equalsIgnoreCase( ManageSnapshotCommand.CREATE_SNAPSHOT)) { vm.snapshotCreateXML(snapshot); } else { DomainSnapshot snap = vm.snapshotLookupByName(snapshotName); snap.delete(0); } /* * libvirt on RHEL6 doesn't handle resume event emitted from * qemu */ vm = getDomain(conn, cmd.getVmName()); state = vm.getInfo().state; if (state == DomainInfo.DomainState.VIR_DOMAIN_PAUSED) { vm.resume(); } } else { /* VM is not running, create a snapshot by ourself */ final Script command = new Script(_manageSnapshotPath, _cmdsTimeout, s_logger); if (cmd.getCommandSwitch().equalsIgnoreCase( ManageSnapshotCommand.CREATE_SNAPSHOT)) { command.add("-c", disk.getPath()); } else { command.add("-d", snapshotPath); } command.add("-n", snapshotName); String result = command.execute(); if (result != null) { s_logger.debug("Failed to manage snapshot: " + result); return new ManageSnapshotAnswer(cmd, false, "Failed to manage snapshot: " + result); } } return new ManageSnapshotAnswer(cmd, cmd.getSnapshotId(), disk.getPath() + File.separator + snapshotName, true, null); } catch (LibvirtException e) { s_logger.debug("Failed to manage snapshot: " + e.toString()); return new ManageSnapshotAnswer(cmd, false, "Failed to manage snapshot: " + e.toString()); } } protected BackupSnapshotAnswer execute(final BackupSnapshotCommand cmd) { Long dcId = cmd.getDataCenterId(); Long accountId = 
cmd.getAccountId(); Long volumeId = cmd.getVolumeId(); String secondaryStoragePoolUrl = cmd.getSecondaryStorageUrl(); String snapshotName = cmd.getSnapshotName(); String snapshotPath = cmd.getVolumePath(); String snapshotDestPath = null; String snapshotRelPath = null; String vmName = cmd.getVmName(); KVMStoragePool secondaryStoragePool = null; try { Connect conn = LibvirtConnection.getConnection(); secondaryStoragePool = _storagePoolMgr.getStoragePoolByURI( secondaryStoragePoolUrl); String ssPmountPath = secondaryStoragePool.getLocalPath(); snapshotRelPath = File.separator + "snapshots" + File.separator + dcId + File.separator + accountId + File.separator + volumeId; snapshotDestPath = ssPmountPath + File.separator + "snapshots" + File.separator + dcId + File.separator + accountId + File.separator + volumeId; KVMStoragePool primaryPool = _storagePoolMgr.getStoragePool( cmd.getPool().getType(), cmd.getPrimaryStoragePoolNameLabel()); KVMPhysicalDisk snapshotDisk = primaryPool.getPhysicalDisk(cmd .getVolumePath()); Script command = new Script(_manageSnapshotPath, _cmdsTimeout, s_logger); command.add("-b", snapshotDisk.getPath()); command.add("-n", snapshotName); command.add("-p", snapshotDestPath); command.add("-t", snapshotName); String result = command.execute(); if (result != null) { s_logger.debug("Failed to backup snaptshot: " + result); return new BackupSnapshotAnswer(cmd, false, result, null, true); } /* Delete the snapshot on primary */ DomainInfo.DomainState state = null; Domain vm = null; if (vmName != null) { try { vm = getDomain(conn, cmd.getVmName()); state = vm.getInfo().state; } catch (LibvirtException e) { } } KVMStoragePool primaryStorage = _storagePoolMgr.getStoragePool( cmd.getPool().getType(), cmd.getPool().getUuid()); if (state == DomainInfo.DomainState.VIR_DOMAIN_RUNNING && !primaryStorage.isExternalSnapshot()) { String vmUuid = vm.getUUIDString(); Object[] args = new Object[] { snapshotName, vmUuid }; String snapshot = SnapshotXML.format(args); s_logger.debug(snapshot); DomainSnapshot snap = vm.snapshotLookupByName(snapshotName); snap.delete(0); /* * libvirt on RHEL6 doesn't handle resume event emitted from * qemu */ vm = getDomain(conn, cmd.getVmName()); state = vm.getInfo().state; if (state == DomainInfo.DomainState.VIR_DOMAIN_PAUSED) { vm.resume(); } } else { command = new Script(_manageSnapshotPath, _cmdsTimeout, s_logger); command.add("-d", snapshotDisk.getPath()); command.add("-n", snapshotName); result = command.execute(); if (result != null) { s_logger.debug("Failed to backup snapshot: " + result); return new BackupSnapshotAnswer(cmd, false, "Failed to backup snapshot: " + result, null, true); } } } catch (LibvirtException e) { return new BackupSnapshotAnswer(cmd, false, e.toString(), null, true); } catch (CloudRuntimeException e) { return new BackupSnapshotAnswer(cmd, false, e.toString(), null, true); } finally { if (secondaryStoragePool != null) { secondaryStoragePool.delete(); } } return new BackupSnapshotAnswer(cmd, true, null, snapshotRelPath + File.separator + snapshotName, true); } protected DeleteSnapshotBackupAnswer execute( final DeleteSnapshotBackupCommand cmd) { Long dcId = cmd.getDataCenterId(); Long accountId = cmd.getAccountId(); Long volumeId = cmd.getVolumeId(); KVMStoragePool secondaryStoragePool = null; try { secondaryStoragePool = _storagePoolMgr.getStoragePoolByURI(cmd .getSecondaryStorageUrl()); String ssPmountPath = secondaryStoragePool.getLocalPath(); String snapshotDestPath = ssPmountPath + File.separator + "snapshots" + File.separator + 
dcId + File.separator + accountId + File.separator + volumeId; final Script command = new Script(_manageSnapshotPath, _cmdsTimeout, s_logger); command.add("-d", snapshotDestPath); command.add("-n", cmd.getSnapshotName()); command.execute(); } catch (CloudRuntimeException e) { return new DeleteSnapshotBackupAnswer(cmd, false, e.toString()); } finally { if (secondaryStoragePool != null) { secondaryStoragePool.delete(); } } return new DeleteSnapshotBackupAnswer(cmd, true, null); } protected Answer execute(DeleteSnapshotsDirCommand cmd) { Long dcId = cmd.getDcId(); Long accountId = cmd.getAccountId(); Long volumeId = cmd.getVolumeId(); KVMStoragePool secondaryStoragePool = null; try { secondaryStoragePool = _storagePoolMgr.getStoragePoolByURI(cmd .getSecondaryStorageUrl()); String ssPmountPath = secondaryStoragePool.getLocalPath(); String snapshotDestPath = ssPmountPath + File.separator + "snapshots" + File.separator + dcId + File.separator + accountId + File.separator + volumeId; final Script command = new Script(_manageSnapshotPath, _cmdsTimeout, s_logger); command.add("-d", snapshotDestPath); command.add("-f"); command.execute(); } catch (CloudRuntimeException e) { return new Answer(cmd, false, e.toString()); } finally { if (secondaryStoragePool != null) { secondaryStoragePool.delete(); } } return new Answer(cmd, true, null); } protected CreateVolumeFromSnapshotAnswer execute( final CreateVolumeFromSnapshotCommand cmd) { try { String snapshotPath = cmd.getSnapshotUuid(); int index = snapshotPath.lastIndexOf("/"); snapshotPath = snapshotPath.substring(0, index); KVMStoragePool secondaryPool = _storagePoolMgr.getStoragePoolByURI( cmd.getSecondaryStorageUrl() + snapshotPath); KVMPhysicalDisk snapshot = secondaryPool.getPhysicalDisk(cmd .getSnapshotName()); String primaryUuid = cmd.getPrimaryStoragePoolNameLabel(); KVMStoragePool primaryPool = _storagePoolMgr .getStoragePool(cmd.getPool().getType(), primaryUuid); String volUuid = UUID.randomUUID().toString(); KVMPhysicalDisk disk = _storagePoolMgr.copyPhysicalDisk(snapshot, volUuid, primaryPool); return new CreateVolumeFromSnapshotAnswer(cmd, true, "", disk.getName()); } catch (CloudRuntimeException e) { return new CreateVolumeFromSnapshotAnswer(cmd, false, e.toString(), null); } } protected Answer execute(final UpgradeSnapshotCommand cmd) { return new Answer(cmd, true, "success"); } protected CreatePrivateTemplateAnswer execute( final CreatePrivateTemplateFromSnapshotCommand cmd) { String templateFolder = cmd.getAccountId() + File.separator + cmd.getNewTemplateId(); String templateInstallFolder = "template/tmpl/" + templateFolder; String tmplName = UUID.randomUUID().toString(); String tmplFileName = tmplName + ".qcow2"; KVMStoragePool secondaryPool = null; KVMStoragePool snapshotPool = null; try { String snapshotPath = cmd.getSnapshotUuid(); int index = snapshotPath.lastIndexOf("/"); snapshotPath = snapshotPath.substring(0, index); snapshotPool = _storagePoolMgr.getStoragePoolByURI(cmd .getSecondaryStorageUrl() + snapshotPath); KVMPhysicalDisk snapshot = snapshotPool.getPhysicalDisk(cmd .getSnapshotName()); secondaryPool = _storagePoolMgr.getStoragePoolByURI( cmd.getSecondaryStorageUrl()); String templatePath = secondaryPool.getLocalPath() + File.separator + templateInstallFolder; _storage.mkdirs(templatePath); String tmplPath = templateInstallFolder + File.separator + tmplFileName; Script command = new Script(_createTmplPath, _cmdsTimeout, s_logger); command.add("-t", templatePath); command.add("-n", tmplFileName); command.add("-f", 
snapshot.getPath()); command.execute(); Map<String, Object> params = new HashMap<String, Object>(); params.put(StorageLayer.InstanceConfigKey, _storage); Processor qcow2Processor = new QCOW2Processor(); qcow2Processor.configure("QCOW2 Processor", params); FormatInfo info = qcow2Processor.process(templatePath, null, tmplName); TemplateLocation loc = new TemplateLocation(_storage, templatePath); loc.create(1, true, tmplName); loc.addFormat(info); loc.save(); return new CreatePrivateTemplateAnswer(cmd, true, "", tmplPath, info.virtualSize, info.size, tmplName, info.format); } catch (ConfigurationException e) { return new CreatePrivateTemplateAnswer(cmd, false, e.getMessage()); } catch (InternalErrorException e) { return new CreatePrivateTemplateAnswer(cmd, false, e.getMessage()); } catch (IOException e) { return new CreatePrivateTemplateAnswer(cmd, false, e.getMessage()); } catch (CloudRuntimeException e) { return new CreatePrivateTemplateAnswer(cmd, false, e.getMessage()); } finally { if (secondaryPool != null) { secondaryPool.delete(); } if (snapshotPool != null) { snapshotPool.delete(); } } } protected GetStorageStatsAnswer execute(final GetStorageStatsCommand cmd) { try { KVMStoragePool sp = _storagePoolMgr.getStoragePool( cmd.getPooltype(), cmd.getStorageId()); return new GetStorageStatsAnswer(cmd, sp.getCapacity(), sp.getUsed()); } catch (CloudRuntimeException e) { return new GetStorageStatsAnswer(cmd, e.toString()); } } protected CreatePrivateTemplateAnswer execute( CreatePrivateTemplateFromVolumeCommand cmd) { String secondaryStorageURL = cmd.getSecondaryStorageUrl(); KVMStoragePool secondaryStorage = null; try { Connect conn = LibvirtConnection.getConnection(); String templateFolder = cmd.getAccountId() + File.separator + cmd.getTemplateId() + File.separator; String templateInstallFolder = "/template/tmpl/" + templateFolder; secondaryStorage = _storagePoolMgr.getStoragePoolByURI( secondaryStorageURL); KVMStoragePool primary = _storagePoolMgr.getStoragePool( cmd.getPool().getType(), cmd.getPrimaryStoragePoolNameLabel()); KVMPhysicalDisk disk = primary.getPhysicalDisk(cmd.getVolumePath()); String tmpltPath = secondaryStorage.getLocalPath() + File.separator + templateInstallFolder; _storage.mkdirs(tmpltPath); if (primary.getType() != StoragePoolType.RBD) { Script command = new Script(_createTmplPath, _cmdsTimeout, s_logger); command.add("-f", disk.getPath()); command.add("-t", tmpltPath); command.add("-n", cmd.getUniqueName() + ".qcow2"); String result = command.execute(); if (result != null) { s_logger.debug("failed to create template: " + result); return new CreatePrivateTemplateAnswer(cmd, false, result); } } else { s_logger.debug("Converting RBD disk " + disk.getPath() + " into template " + cmd.getUniqueName()); Script.runSimpleBashScript("qemu-img convert" + " -f raw -O qcow2 " + KVMPhysicalDisk.RBDStringBuilder(primary.getSourceHost(), primary.getSourcePort(), primary.getAuthUserName(), primary.getAuthSecret(), disk.getPath()) + " " + tmpltPath + "/" + cmd.getUniqueName() + ".qcow2"); File templateProp = new File(tmpltPath + "/template.properties"); if (!templateProp.exists()) { templateProp.createNewFile(); } String templateContent = "filename=" + cmd.getUniqueName() + ".qcow2" + System.getProperty("line.separator"); DateFormat dateFormat = new SimpleDateFormat("MM_dd_yyyy"); Date date = new Date(); templateContent += "snapshot.name=" + dateFormat.format(date) + System.getProperty("line.separator"); FileOutputStream templFo = new FileOutputStream(templateProp); 
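// write out the generated template.properties (filename= and snapshot.name= entries) alongside the converted RBD template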
templFo.write(templateContent.getBytes()); templFo.flush(); templFo.close(); } Map<String, Object> params = new HashMap<String, Object>(); params.put(StorageLayer.InstanceConfigKey, _storage); Processor qcow2Processor = new QCOW2Processor(); qcow2Processor.configure("QCOW2 Processor", params); FormatInfo info = qcow2Processor.process(tmpltPath, null, cmd.getUniqueName()); TemplateLocation loc = new TemplateLocation(_storage, tmpltPath); loc.create(1, true, cmd.getUniqueName()); loc.addFormat(info); loc.save(); return new CreatePrivateTemplateAnswer(cmd, true, null, templateInstallFolder + cmd.getUniqueName() + ".qcow2", info.virtualSize, info.size, cmd.getUniqueName(), ImageFormat.QCOW2); } catch (LibvirtException e) { s_logger.debug("Failed to get secondary storage pool: " + e.toString()); return new CreatePrivateTemplateAnswer(cmd, false, e.toString()); } catch (InternalErrorException e) { return new CreatePrivateTemplateAnswer(cmd, false, e.toString()); } catch (IOException e) { return new CreatePrivateTemplateAnswer(cmd, false, e.toString()); } catch (ConfigurationException e) { return new CreatePrivateTemplateAnswer(cmd, false, e.toString()); } catch (CloudRuntimeException e) { return new CreatePrivateTemplateAnswer(cmd, false, e.toString()); } finally { if (secondaryStorage != null) { secondaryStorage.delete(); } } } protected PrimaryStorageDownloadAnswer execute( final PrimaryStorageDownloadCommand cmd) { String tmplturl = cmd.getUrl(); int index = tmplturl.lastIndexOf("/"); String mountpoint = tmplturl.substring(0, index); String tmpltname = null; if (index < tmplturl.length() - 1) { tmpltname = tmplturl.substring(index + 1); } KVMPhysicalDisk tmplVol = null; KVMStoragePool secondaryPool = null; try { secondaryPool = _storagePoolMgr.getStoragePoolByURI(mountpoint); /* Get template vol */ if (tmpltname == null) { secondaryPool.refresh(); List<KVMPhysicalDisk> disks = secondaryPool.listPhysicalDisks(); if (disks == null || disks.isEmpty()) { return new PrimaryStorageDownloadAnswer( "Failed to get volumes from pool: " + secondaryPool.getUuid()); } for (KVMPhysicalDisk disk : disks) { if (disk.getName().endsWith("qcow2")) { tmplVol = disk; break; } } if (tmplVol == null) { return new PrimaryStorageDownloadAnswer( "Failed to get template from pool: " + secondaryPool.getUuid()); } } else { tmplVol = secondaryPool.getPhysicalDisk(tmpltname); } /* Copy volume to primary storage */ KVMStoragePool primaryPool = _storagePoolMgr.getStoragePool( cmd.getPool().getType(), cmd.getPoolUuid()); KVMPhysicalDisk primaryVol = _storagePoolMgr.copyPhysicalDisk( tmplVol, UUID.randomUUID().toString(), primaryPool); return new PrimaryStorageDownloadAnswer(primaryVol.getName(), primaryVol.getSize()); } catch (CloudRuntimeException e) { return new PrimaryStorageDownloadAnswer(e.toString()); } finally { if (secondaryPool != null) { secondaryPool.delete(); } } } protected Answer execute(CreateStoragePoolCommand cmd) { return new Answer(cmd, true, "success"); } protected Answer execute(ModifyStoragePoolCommand cmd) { String poolType = cmd.getPool().getType().toString(); KVMStoragePool storagepool = _storagePoolMgr.createStoragePool(cmd .getPool().getUuid(), cmd.getPool().getHost(), cmd.getPool().getPort(), cmd.getPool().getPath(), cmd.getPool().getUserInfo(), cmd.getPool().getType()); if (storagepool == null) { return new Answer(cmd, false, " Failed to create storage pool"); } Map<String, TemplateInfo> tInfo = new HashMap<String, TemplateInfo>(); ModifyStoragePoolAnswer answer = new ModifyStoragePoolAnswer(cmd, 
storagepool.getCapacity(), storagepool.getUsed(), tInfo); return answer; } private Answer execute(SecurityGroupRulesCmd cmd) { String vif = null; String brname = null; try { Connect conn = LibvirtConnection.getConnection(); List<InterfaceDef> nics = getInterfaces(conn, cmd.getVmName()); vif = nics.get(0).getDevName(); brname = nics.get(0).getBrName(); } catch (LibvirtException e) { return new SecurityGroupRuleAnswer(cmd, false, e.toString()); } boolean result = add_network_rules(cmd.getVmName(), Long.toString(cmd.getVmId()), cmd.getGuestIp(), cmd.getSignature(), Long.toString(cmd.getSeqNum()), cmd.getGuestMac(), cmd.stringifyRules(), vif, brname); if (!result) { s_logger.warn("Failed to program network rules for vm " + cmd.getVmName()); return new SecurityGroupRuleAnswer(cmd, false, "programming network rules failed"); } else { s_logger.debug("Programmed network rules for vm " + cmd.getVmName() + " guestIp=" + cmd.getGuestIp() + ",ingress numrules=" + cmd.getIngressRuleSet().length + ",egress numrules=" + cmd.getEgressRuleSet().length); return new SecurityGroupRuleAnswer(cmd); } } private Answer execute(CleanupNetworkRulesCmd cmd) { boolean result = cleanup_rules(); return new Answer(cmd, result, ""); } protected GetVncPortAnswer execute(GetVncPortCommand cmd) { try { Connect conn = LibvirtConnection.getConnection(); Integer vncPort = getVncPort(conn, cmd.getName()); return new GetVncPortAnswer(cmd, _privateIp, 5900 + vncPort); } catch (Exception e) { return new GetVncPortAnswer(cmd, e.toString()); } } protected Answer execute(final CheckConsoleProxyLoadCommand cmd) { return executeProxyLoadScan(cmd, cmd.getProxyVmId(), cmd.getProxyVmName(), cmd.getProxyManagementIp(), cmd.getProxyCmdPort()); } protected Answer execute(final WatchConsoleProxyLoadCommand cmd) { return executeProxyLoadScan(cmd, cmd.getProxyVmId(), cmd.getProxyVmName(), cmd.getProxyManagementIp(), cmd.getProxyCmdPort()); } protected MaintainAnswer execute(MaintainCommand cmd) { return new MaintainAnswer(cmd); } private Answer executeProxyLoadScan(final Command cmd, final long proxyVmId, final String proxyVmName, final String proxyManagementIp, final int cmdPort) { String result = null; final StringBuffer sb = new StringBuffer(); sb.append("http://").append(proxyManagementIp).append(":" + cmdPort) .append("/cmd/getstatus"); boolean success = true; try { final URL url = new URL(sb.toString()); final URLConnection conn = url.openConnection(); final InputStream is = conn.getInputStream(); final BufferedReader reader = new BufferedReader( new InputStreamReader(is)); final StringBuilder sb2 = new StringBuilder(); String line = null; try { while ((line = reader.readLine()) != null) { sb2.append(line + "\n"); } result = sb2.toString(); } catch (final IOException e) { success = false; } finally { try { is.close(); } catch (final IOException e) { s_logger.warn("Exception when closing , console proxy address : " + proxyManagementIp); success = false; } } } catch (final IOException e) { s_logger.warn("Unable to open console proxy command port url, console proxy address : " + proxyManagementIp); success = false; } return new ConsoleProxyLoadAnswer(cmd, proxyVmId, proxyVmName, success, result); } private Answer execute(AttachIsoCommand cmd) { try { Connect conn = LibvirtConnection.getConnection(); attachOrDetachISO(conn, cmd.getVmName(), cmd.getIsoPath(), cmd.isAttach()); } catch (LibvirtException e) { return new Answer(cmd, false, e.toString()); } catch (URISyntaxException e) { return new Answer(cmd, false, e.toString()); } catch 
(InternalErrorException e) { return new Answer(cmd, false, e.toString()); } return new Answer(cmd); } private AttachVolumeAnswer execute(AttachVolumeCommand cmd) { try { Connect conn = LibvirtConnection.getConnection(); KVMStoragePool primary = _storagePoolMgr.getStoragePool( cmd.getPooltype(), cmd.getPoolUuid()); KVMPhysicalDisk disk = primary.getPhysicalDisk(cmd.getVolumePath()); attachOrDetachDisk(conn, cmd.getAttach(), cmd.getVmName(), disk, cmd.getDeviceId().intValue()); } catch (LibvirtException e) { return new AttachVolumeAnswer(cmd, e.toString()); } catch (InternalErrorException e) { return new AttachVolumeAnswer(cmd, e.toString()); } return new AttachVolumeAnswer(cmd, cmd.getDeviceId()); } private Answer execute(ReadyCommand cmd) { return new ReadyAnswer(cmd); } protected State convertToState(DomainInfo.DomainState ps) { final State state = s_statesTable.get(ps); return state == null ? State.Unknown : state; } protected State getVmState(Connect conn, final String vmName) { int retry = 3; Domain vms = null; while (retry-- > 0) { try { vms = conn.domainLookupByUUID(UUID.nameUUIDFromBytes(vmName .getBytes())); State s = convertToState(vms.getInfo().state); return s; } catch (final LibvirtException e) { s_logger.warn("Can't get vm state " + vmName + e.getMessage() + "retry:" + retry); } catch (Exception e) { s_logger.warn("Can't get vm state " + vmName + e.getMessage() + "retry:" + retry); } finally { try { if (vms != null) { vms.free(); } } catch (final LibvirtException e) { } } } return State.Stopped; } private Answer execute(CheckVirtualMachineCommand cmd) { try { Connect conn = LibvirtConnection.getConnection(); final State state = getVmState(conn, cmd.getVmName()); Integer vncPort = null; if (state == State.Running) { vncPort = getVncPort(conn, cmd.getVmName()); synchronized (_vms) { _vms.put(cmd.getVmName(), State.Running); } } return new CheckVirtualMachineAnswer(cmd, state, vncPort); } catch (LibvirtException e) { return new CheckVirtualMachineAnswer(cmd, e.getMessage()); } } private Answer execute(PingTestCommand cmd) { String result = null; final String computingHostIp = cmd.getComputingHostIp(); // TODO, split // the // command // into 2 // types if (computingHostIp != null) { result = doPingTest(computingHostIp); } else if (cmd.getRouterIp() != null && cmd.getPrivateIp() != null) { result = doPingTest(cmd.getRouterIp(), cmd.getPrivateIp()); } else { return new Answer(cmd, false, "routerip and private ip is null"); } if (result != null) { return new Answer(cmd, false, result); } return new Answer(cmd); } private String doPingTest(final String computingHostIp) { final Script command = new Script(_pingTestPath, 10000, s_logger); command.add("-h", computingHostIp); return command.execute(); } private String doPingTest(final String domRIp, final String vmIp) { final Script command = new Script(_pingTestPath, 10000, s_logger); command.add("-i", domRIp); command.add("-p", vmIp); return command.execute(); } private synchronized Answer execute(MigrateCommand cmd) { String vmName = cmd.getVmName(); State state = null; String result = null; synchronized (_vms) { state = _vms.get(vmName); _vms.put(vmName, State.Stopping); } List<InterfaceDef> ifaces = null; Domain dm = null; Connect dconn = null; Domain destDomain = null; Connect conn = null; try { conn = LibvirtConnection.getConnection(); ifaces = getInterfaces(conn, vmName); dm = conn.domainLookupByUUID(UUID.nameUUIDFromBytes(vmName .getBytes())); dconn = new Connect("qemu+tcp://" + cmd.getDestinationIp() + "/system"); /* * Hard 
code lm flags: VIR_MIGRATE_LIVE(1<<0) and * VIR_MIGRATE_PERSIST_DEST(1<<3) */ destDomain = dm.migrate(dconn, (1 << 0) | (1 << 3), vmName, "tcp:" + cmd.getDestinationIp(), _migrateSpeed); } catch (LibvirtException e) { s_logger.debug("Can't migrate domain: " + e.getMessage()); result = e.getMessage(); } catch (Exception e) { s_logger.debug("Can't migrate domain: " + e.getMessage()); result = e.getMessage(); } finally { try { if (dm != null) { dm.free(); } if (dconn != null) { dconn.close(); } if (destDomain != null) { destDomain.free(); } } catch (final LibvirtException e) { } } if (result != null) { synchronized (_vms) { _vms.put(vmName, state); } } else { destroy_network_rules_for_vm(conn, vmName); for (InterfaceDef iface : ifaces) { _vifDriver.unplug(iface); } cleanupVM(conn, vmName, getVnetId(VirtualMachineName.getVnet(vmName))); } return new MigrateAnswer(cmd, result == null, result, null); } private synchronized Answer execute(PrepareForMigrationCommand cmd) { VirtualMachineTO vm = cmd.getVirtualMachine(); if (s_logger.isDebugEnabled()) { s_logger.debug("Preparing host for migrating " + vm); } NicTO[] nics = vm.getNics(); try { Connect conn = LibvirtConnection.getConnection(); for (NicTO nic : nics) { _vifDriver.plug(nic, null); } /* setup disks, e.g for iso */ VolumeTO[] volumes = vm.getDisks(); for (VolumeTO volume : volumes) { if (volume.getType() == Volume.Type.ISO) { getVolumePath(conn, volume); } } synchronized (_vms) { _vms.put(vm.getName(), State.Migrating); } return new PrepareForMigrationAnswer(cmd); } catch (LibvirtException e) { return new PrepareForMigrationAnswer(cmd, e.toString()); } catch (InternalErrorException e) { return new PrepareForMigrationAnswer(cmd, e.toString()); } catch (URISyntaxException e) { return new PrepareForMigrationAnswer(cmd, e.toString()); } } private Answer execute(CheckHealthCommand cmd) { return new CheckHealthAnswer(cmd, true); } private Answer execute(GetHostStatsCommand cmd) { final Script cpuScript = new Script("/bin/bash", s_logger); cpuScript.add("-c"); cpuScript .add("idle=$(top -b -n 1|grep Cpu\\(s\\):|cut -d% -f4|cut -d, -f2);echo $idle"); final OutputInterpreter.OneLineParser parser = new OutputInterpreter.OneLineParser(); String result = cpuScript.execute(parser); if (result != null) { s_logger.debug("Unable to get the host CPU state: " + result); return new Answer(cmd, false, result); } double cpuUtil = (100.0D - Double.parseDouble(parser.getLine())); long freeMem = 0; final Script memScript = new Script("/bin/bash", s_logger); memScript.add("-c"); memScript .add("freeMem=$(free|grep cache:|awk '{print $4}');echo $freeMem"); final OutputInterpreter.OneLineParser Memparser = new OutputInterpreter.OneLineParser(); result = memScript.execute(Memparser); if (result != null) { s_logger.debug("Unable to get the host Mem state: " + result); return new Answer(cmd, false, result); } freeMem = Long.parseLong(Memparser.getLine()); Script totalMem = new Script("/bin/bash", s_logger); totalMem.add("-c"); totalMem.add("free|grep Mem:|awk '{print $2}'"); final OutputInterpreter.OneLineParser totMemparser = new OutputInterpreter.OneLineParser(); result = totalMem.execute(totMemparser); if (result != null) { s_logger.debug("Unable to get the host Mem state: " + result); return new Answer(cmd, false, result); } long totMem = Long.parseLong(totMemparser.getLine()); Pair<Double, Double> nicStats = getNicStats(_publicBridgeName); HostStatsEntry hostStats = new HostStatsEntry(cmd.getHostId(), cpuUtil, nicStats.first() / 1000, nicStats.second() / 1000, 
"host", totMem, freeMem, 0, 0); return new GetHostStatsAnswer(cmd, hostStats); } protected String networkUsage(final String privateIpAddress, final String option, final String vif) { Script getUsage = new Script(_routerProxyPath, s_logger); getUsage.add("netusage.sh"); getUsage.add(privateIpAddress); if (option.equals("get")) { getUsage.add("-g"); } else if (option.equals("create")) { getUsage.add("-c"); } else if (option.equals("reset")) { getUsage.add("-r"); } else if (option.equals("addVif")) { getUsage.add("-a", vif); } else if (option.equals("deleteVif")) { getUsage.add("-d", vif); } final OutputInterpreter.OneLineParser usageParser = new OutputInterpreter.OneLineParser(); String result = getUsage.execute(usageParser); if (result != null) { s_logger.debug("Failed to execute networkUsage:" + result); return null; } return usageParser.getLine(); } protected long[] getNetworkStats(String privateIP) { String result = networkUsage(privateIP, "get", null); long[] stats = new long[2]; if (result != null) { String[] splitResult = result.split(":"); int i = 0; while (i < splitResult.length - 1) { stats[0] += (new Long(splitResult[i++])).longValue(); stats[1] += (new Long(splitResult[i++])).longValue(); } } return stats; } private Answer execute(NetworkUsageCommand cmd) { if (cmd.getOption() != null && cmd.getOption().equals("create")) { String result = networkUsage(cmd.getPrivateIP(), "create", null); NetworkUsageAnswer answer = new NetworkUsageAnswer(cmd, result, 0L, 0L); return answer; } long[] stats = getNetworkStats(cmd.getPrivateIP()); NetworkUsageAnswer answer = new NetworkUsageAnswer(cmd, "", stats[0], stats[1]); return answer; } private Answer execute(RebootCommand cmd) { synchronized (_vms) { _vms.put(cmd.getVmName(), State.Starting); } try { Connect conn = LibvirtConnection.getConnection(); final String result = rebootVM(conn, cmd.getVmName()); if (result == null) { Integer vncPort = null; try { vncPort = getVncPort(conn, cmd.getVmName()); } catch (Exception e) { } get_rule_logs_for_vms(); return new RebootAnswer(cmd, null, vncPort); } else { return new RebootAnswer(cmd, result, false); } } catch (LibvirtException e) { return new RebootAnswer(cmd, e.getMessage(), false); } finally { synchronized (_vms) { _vms.put(cmd.getVmName(), State.Running); } } } protected Answer execute(RebootRouterCommand cmd) { RebootAnswer answer = (RebootAnswer) execute((RebootCommand) cmd); String result = _virtRouterResource.connect(cmd.getPrivateIpAddress()); if (result == null) { networkUsage(cmd.getPrivateIpAddress(), "create", null); return answer; } else { return new Answer(cmd, false, result); } } protected GetVmStatsAnswer execute(GetVmStatsCommand cmd) { List<String> vmNames = cmd.getVmNames(); try { HashMap<String, VmStatsEntry> vmStatsNameMap = new HashMap<String, VmStatsEntry>(); Connect conn = LibvirtConnection.getConnection(); for (String vmName : vmNames) { VmStatsEntry statEntry = getVmStat(conn, vmName); if (statEntry == null) { continue; } vmStatsNameMap.put(vmName, statEntry); } return new GetVmStatsAnswer(cmd, vmStatsNameMap); } catch (LibvirtException e) { s_logger.debug("Can't get vm stats: " + e.toString()); return new GetVmStatsAnswer(cmd, null); } } protected Answer execute(StopCommand cmd) { final String vmName = cmd.getVmName(); State state = null; synchronized (_vms) { state = _vms.get(vmName); _vms.put(vmName, State.Stopping); } try { Connect conn = LibvirtConnection.getConnection(); List<DiskDef> disks = getDisks(conn, vmName); List<InterfaceDef> ifaces = getInterfaces(conn, 
vmName); destroy_network_rules_for_vm(conn, vmName); String result = stopVM(conn, vmName, defineOps.UNDEFINE_VM); if (result == null) { for (DiskDef disk : disks) { if (disk.getDeviceType() == DiskDef.deviceType.CDROM && disk.getDiskPath() != null) { cleanupDisk(conn, disk); } } for (InterfaceDef iface: ifaces) { _vifDriver.unplug(iface); } } final String result2 = cleanupVnet(conn, cmd.getVnet()); if (result != null && result2 != null) { result = result2 + result; } state = State.Stopped; return new StopAnswer(cmd, result, 0, true); } catch (LibvirtException e) { return new StopAnswer(cmd, e.getMessage(), false); } finally { synchronized (_vms) { if (state != null) { _vms.put(vmName, state); } else { _vms.remove(vmName); } } } } protected Answer execute(ModifySshKeysCommand cmd) { File sshKeysDir = new File(_SSHKEYSPATH); String result = null; if (!sshKeysDir.exists()) { // Change permissions for the 700 Script script = new Script("mkdir", _timeout, s_logger); script.add("-m","700"); script.add(_SSHKEYSPATH); script.execute(); if(!sshKeysDir.exists()) { s_logger.debug("failed to create directory " + _SSHKEYSPATH); } } File pubKeyFile = new File(_SSHPUBKEYPATH); if (!pubKeyFile.exists()) { try { pubKeyFile.createNewFile(); } catch (IOException e) { result = "Failed to create file: " + e.toString(); s_logger.debug(result); } } if (pubKeyFile.exists()) { String pubKey = cmd.getPubKey(); try { FileOutputStream pubkStream = new FileOutputStream(pubKeyFile); pubkStream.write(pubKey.getBytes()); pubkStream.close(); } catch (FileNotFoundException e) { result = "File" + _SSHPUBKEYPATH + "is not found:" + e.toString(); s_logger.debug(result); } catch (IOException e) { result = "Write file " + _SSHPUBKEYPATH + ":" + e.toString(); s_logger.debug(result); } } File prvKeyFile = new File(_SSHPRVKEYPATH); if (!prvKeyFile.exists()) { try { prvKeyFile.createNewFile(); } catch (IOException e) { result = "Failed to create file: " + e.toString(); s_logger.debug(result); } } if (prvKeyFile.exists()) { String prvKey = cmd.getPrvKey(); try { FileOutputStream prvKStream = new FileOutputStream(prvKeyFile); prvKStream.write(prvKey.getBytes()); prvKStream.close(); } catch (FileNotFoundException e) { result = "File" + _SSHPRVKEYPATH + "is not found:" + e.toString(); s_logger.debug(result); } catch (IOException e) { result = "Write file " + _SSHPRVKEYPATH + ":" + e.toString(); s_logger.debug(result); } Script script = new Script("chmod", _timeout, s_logger); script.add("600", _SSHPRVKEYPATH); script.execute(); } if (result != null) { return new Answer(cmd, false, result); } else { return new Answer(cmd, true, null); } } protected void handleVmStartFailure(Connect conn, String vmName, LibvirtVMDef vm) { if (vm != null && vm.getDevices() != null) { cleanupVMNetworks(conn, vm.getDevices().getInterfaces()); } } protected LibvirtVMDef createVMFromSpec(VirtualMachineTO vmTO) { LibvirtVMDef vm = new LibvirtVMDef(); vm.setHvsType(_hypervisorType); vm.setDomainName(vmTO.getName()); vm.setDomUUID(UUID.nameUUIDFromBytes(vmTO.getName().getBytes()) .toString()); vm.setDomDescription(vmTO.getOs()); GuestDef guest = new GuestDef(); guest.setGuestType(GuestDef.guestType.KVM); guest.setGuestArch(vmTO.getArch()); guest.setMachineType("pc"); guest.setBootOrder(GuestDef.bootOrder.CDROM); guest.setBootOrder(GuestDef.bootOrder.HARDISK); vm.addComp(guest); GuestResourceDef grd = new GuestResourceDef(); grd.setMemorySize(vmTO.getMinRam() / 1024); grd.setVcpuNum(vmTO.getCpus()); vm.addComp(grd); CpuTuneDef ctd = new CpuTuneDef(); 
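        // cgroup CPU shares are weighted by the vCPU count times the offering's CPU speed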
ctd.setShares(vmTO.getCpus() * vmTO.getSpeed()); vm.addComp(ctd); FeaturesDef features = new FeaturesDef(); features.addFeatures("pae"); features.addFeatures("apic"); features.addFeatures("acpi"); vm.addComp(features); TermPolicy term = new TermPolicy(); term.setCrashPolicy("destroy"); term.setPowerOffPolicy("destroy"); term.setRebootPolicy("restart"); vm.addComp(term); ClockDef clock = new ClockDef(); if (vmTO.getOs().startsWith("Windows")) { clock.setClockOffset(ClockDef.ClockOffset.LOCALTIME); clock.setTimer("rtc", "catchup", null); } vm.addComp(clock); DevicesDef devices = new DevicesDef(); devices.setEmulatorPath(_hypervisorPath); SerialDef serial = new SerialDef("pty", null, (short) 0); devices.addDevice(serial); ConsoleDef console = new ConsoleDef("pty", null, null, (short) 0); devices.addDevice(console); GraphicDef grap = new GraphicDef("vnc", (short) 0, true, vmTO.getVncAddr(), null, null); devices.addDevice(grap); InputDef input = new InputDef("tablet", "usb"); devices.addDevice(input); vm.addComp(devices); return vm; } protected void createVifs(VirtualMachineTO vmSpec, LibvirtVMDef vm) throws InternalErrorException, LibvirtException { NicTO[] nics = vmSpec.getNics(); for (int i = 0; i < nics.length; i++) { for (NicTO nic : vmSpec.getNics()) { if (nic.getDeviceId() == i) { createVif(vm, nic); } } } } protected synchronized StartAnswer execute(StartCommand cmd) { VirtualMachineTO vmSpec = cmd.getVirtualMachine(); vmSpec.setVncAddr(cmd.getHostIp()); String vmName = vmSpec.getName(); LibvirtVMDef vm = null; State state = State.Stopped; Connect conn = null; try { conn = LibvirtConnection.getConnection(); synchronized (_vms) { _vms.put(vmName, State.Starting); } vm = createVMFromSpec(vmSpec); createVbd(conn, vmSpec, vmName, vm); createVifs(vmSpec, vm); s_logger.debug("starting " + vmName + ": " + vm.toString()); startDomain(conn, vmName, vm.toString()); NicTO[] nics = vmSpec.getNics(); for (NicTO nic : nics) { if (nic.isSecurityGroupEnabled() || ( nic.getIsolationUri() != null && nic.getIsolationUri().getScheme().equalsIgnoreCase(IsolationType.Ec2.toString()))) { if (vmSpec.getType() != VirtualMachine.Type.User) { default_network_rules_for_systemvm(conn, vmName); break; } else { default_network_rules(conn, vmName, nic, vmSpec.getId()); } } } state = State.Running; return new StartAnswer(cmd); } catch (Exception e) { s_logger.warn("Exception ", e); if (conn != null) { handleVmStartFailure(conn, vmName, vm); } return new StartAnswer(cmd, e.getMessage()); } finally { synchronized (_vms) { if (state != State.Stopped) { _vms.put(vmName, state); } else { _vms.remove(vmName); } } } } private String getVolumePath(Connect conn, VolumeTO volume) throws LibvirtException, URISyntaxException { if (volume.getType() == Volume.Type.ISO && volume.getPath() != null) { String isoPath = volume.getPath(); int index = isoPath.lastIndexOf("/"); String path = isoPath.substring(0, index); String name = isoPath.substring(index + 1); KVMStoragePool secondaryPool = _storagePoolMgr.getStoragePoolByURI( path); KVMPhysicalDisk isoVol = secondaryPool.getPhysicalDisk(name); return isoVol.getPath(); } else { return volume.getPath(); } } protected void createVbd(Connect conn, VirtualMachineTO vmSpec, String vmName, LibvirtVMDef vm) throws InternalErrorException, LibvirtException, URISyntaxException { List<VolumeTO> disks = Arrays.asList(vmSpec.getDisks()); Collections.sort(disks, new Comparator<VolumeTO>() { @Override public int compare(VolumeTO arg0, VolumeTO arg1) { return arg0.getDeviceId() > arg1.getDeviceId() ? 
1 : -1; } }); for (VolumeTO volume : disks) { KVMPhysicalDisk physicalDisk = null; KVMStoragePool pool = null; if (volume.getType() == Volume.Type.ISO && volume.getPath() != null) { String volPath = volume.getPath(); int index = volPath.lastIndexOf("/"); String volDir = volPath.substring(0, index); String volName = volPath.substring(index + 1); KVMStoragePool secondaryStorage = _storagePoolMgr. getStoragePoolByURI(volDir); physicalDisk = secondaryStorage.getPhysicalDisk(volName); } else if (volume.getType() != Volume.Type.ISO) { pool = _storagePoolMgr.getStoragePool( volume.getPoolType(), volume.getPoolUuid()); physicalDisk = pool.getPhysicalDisk(volume.getPath()); } String volPath = null; if (physicalDisk != null) { volPath = physicalDisk.getPath(); } DiskDef.diskBus diskBusType = getGuestDiskModel(vmSpec.getOs()); DiskDef disk = new DiskDef(); if (volume.getType() == Volume.Type.ISO) { if (volPath == null) { /* Add iso as placeholder */ disk.defISODisk(null); } else { disk.defISODisk(volPath); } } else { int devId = (int) volume.getDeviceId(); if (pool.getType() == StoragePoolType.RBD) { /* For RBD pools we use the secret mechanism in libvirt. We store the secret under the UUID of the pool, that's why we pass the pool's UUID as the authSecret */ disk.defNetworkBasedDisk(physicalDisk.getPath().replace("rbd:", ""), pool.getSourceHost(), pool.getSourcePort(), pool.getAuthUserName(), pool.getUuid(), devId, diskBusType, diskProtocol.RBD); } else if (pool.getType() == StoragePoolType.CLVM) { disk.defBlockBasedDisk(physicalDisk.getPath(), devId, diskBusType); } else { if (volume.getType() == Volume.Type.DATADISK) { disk.defFileBasedDisk(physicalDisk.getPath(), devId, DiskDef.diskBus.VIRTIO, DiskDef.diskFmtType.QCOW2); } else { disk.defFileBasedDisk(physicalDisk.getPath(), devId, diskBusType, DiskDef.diskFmtType.QCOW2); } } } vm.getDevices().addDevice(disk); } if (vmSpec.getType() != VirtualMachine.Type.User) { if (_sysvmISOPath != null) { DiskDef iso = new DiskDef(); iso.defISODisk(_sysvmISOPath); vm.getDevices().addDevice(iso); } createPatchVbd(conn, vmName, vm, vmSpec); } } private VolumeTO getVolume(VirtualMachineTO vmSpec, Volume.Type type) { VolumeTO volumes[] = vmSpec.getDisks(); for (VolumeTO volume : volumes) { if (volume.getType() == type) { return volume; } } return null; } private void createPatchVbd(Connect conn, String vmName, LibvirtVMDef vm, VirtualMachineTO vmSpec) throws LibvirtException, InternalErrorException { List<DiskDef> disks = vm.getDevices().getDisks(); DiskDef rootDisk = disks.get(0); VolumeTO rootVol = getVolume(vmSpec, Volume.Type.ROOT); String patchName = vmName + "-patchdisk"; KVMStoragePool pool = _storagePoolMgr.getStoragePool( rootVol.getPoolType(), rootVol.getPoolUuid()); String patchDiskPath = pool.getLocalPath() + "/" + patchName; List<KVMPhysicalDisk> phyDisks = pool.listPhysicalDisks(); boolean foundDisk = false; for (KVMPhysicalDisk phyDisk : phyDisks) { if (phyDisk.getPath().equals(patchDiskPath)) { foundDisk = true; break; } } if (!foundDisk) { s_logger.debug("generating new patch disk for " + vmName + " since none was found"); KVMPhysicalDisk disk = pool.createPhysicalDisk(patchName, KVMPhysicalDisk.PhysicalDiskFormat.RAW, 10L * 1024 * 1024); } else { s_logger.debug("found existing patch disk at " + patchDiskPath + " using it for " + vmName); } /* Format/create fs on this disk */ final Script command = new Script(_createvmPath, _timeout, s_logger); command.add("-f", patchDiskPath); String result = command.execute(); if (result != null) { 
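            // formatting the patch disk failed, so abort the system VM start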
s_logger.debug("Failed to create data disk: " + result); throw new InternalErrorException("Failed to create data disk: " + result); } /* add patch disk */ DiskDef patchDisk = new DiskDef(); if (pool.getType() == StoragePoolType.CLVM) { patchDisk.defBlockBasedDisk(patchDiskPath, 1, rootDisk.getBusType()); } else { patchDisk.defFileBasedDisk(patchDiskPath, 1, rootDisk.getBusType(), DiskDef.diskFmtType.RAW); } disks.add(patchDisk); String bootArgs = vmSpec.getBootArgs(); patchSystemVm(bootArgs, patchDiskPath, vmName); } private void createVif(LibvirtVMDef vm, NicTO nic) throws InternalErrorException, LibvirtException { vm.getDevices().addDevice( _vifDriver.plug(nic, vm.getGuestOSType()).toString()); } protected CheckSshAnswer execute(CheckSshCommand cmd) { String vmName = cmd.getName(); String privateIp = cmd.getIp(); int cmdPort = cmd.getPort(); if (s_logger.isDebugEnabled()) { s_logger.debug("Ping command port, " + privateIp + ":" + cmdPort); } try { String result = _virtRouterResource.connect(privateIp, cmdPort); if (result != null) { return new CheckSshAnswer(cmd, "Can not ping System vm " + vmName + "due to:" + result); } } catch (Exception e) { return new CheckSshAnswer(cmd, e); } if (s_logger.isDebugEnabled()) { s_logger.debug("Ping command port succeeded for vm " + vmName); } return new CheckSshAnswer(cmd); } private boolean cleanupDisk(Connect conn, DiskDef disk) { // need to umount secondary storage String path = disk.getDiskPath(); String poolUuid = null; if (path != null) { String[] token = path.split("/"); if (token.length > 3) { poolUuid = token[2]; } } if (poolUuid == null) { return true; } try { // we use libvirt as storage adaptor since we passed a libvirt // connection to cleanupDisk. We pass a storage type that maps // to libvirt adaptor. 
KVMStoragePool pool = _storagePoolMgr.getStoragePool( StoragePoolType.Filesystem, poolUuid); if (pool != null) { pool.delete(); } return true; } catch (CloudRuntimeException e) { return false; } } protected synchronized String attachOrDetachISO(Connect conn, String vmName, String isoPath, boolean isAttach) throws LibvirtException, URISyntaxException, InternalErrorException { String isoXml = null; if (isoPath != null && isAttach) { int index = isoPath.lastIndexOf("/"); String path = isoPath.substring(0, index); String name = isoPath.substring(index + 1); KVMStoragePool secondaryPool = _storagePoolMgr.getStoragePoolByURI( path); KVMPhysicalDisk isoVol = secondaryPool.getPhysicalDisk(name); isoPath = isoVol.getPath(); DiskDef iso = new DiskDef(); iso.defISODisk(isoPath); isoXml = iso.toString(); } else { DiskDef iso = new DiskDef(); iso.defISODisk(null); isoXml = iso.toString(); } List<DiskDef> disks = getDisks(conn, vmName); String result = attachOrDetachDevice(conn, true, vmName, isoXml); if (result == null && !isAttach) { for (DiskDef disk : disks) { if (disk.getDeviceType() == DiskDef.deviceType.CDROM) { cleanupDisk(conn, disk); } } } return result; } protected synchronized String attachOrDetachDisk(Connect conn, boolean attach, String vmName, KVMPhysicalDisk attachingDisk, int devId) throws LibvirtException, InternalErrorException { List<DiskDef> disks = null; Domain dm = null; DiskDef diskdef = null; try { if (!attach) { dm = conn.domainLookupByUUID(UUID.nameUUIDFromBytes(vmName .getBytes())); LibvirtDomainXMLParser parser = new LibvirtDomainXMLParser(); String xml = dm.getXMLDesc(0); parser.parseDomainXML(xml); disks = parser.getDisks(); for (DiskDef disk : disks) { String file = disk.getDiskPath(); if (file != null && file.equalsIgnoreCase(attachingDisk.getPath())) { diskdef = disk; break; } } if (diskdef == null) { throw new InternalErrorException("disk: " + attachingDisk.getPath() + " is not attached before"); } } else { diskdef = new DiskDef(); if (attachingDisk.getFormat() == PhysicalDiskFormat.QCOW2) { diskdef.defFileBasedDisk(attachingDisk.getPath(), devId, DiskDef.diskBus.VIRTIO, DiskDef.diskFmtType.QCOW2); } else if (attachingDisk.getFormat() == PhysicalDiskFormat.RAW) { diskdef.defBlockBasedDisk(attachingDisk.getPath(), devId, DiskDef.diskBus.VIRTIO); } } String xml = diskdef.toString(); return attachOrDetachDevice(conn, attach, vmName, xml); } finally { if (dm != null) { dm.free(); } } } protected synchronized String attachOrDetachDevice(Connect conn, boolean attach, String vmName, String xml) throws LibvirtException, InternalErrorException { Domain dm = null; try { dm = conn.domainLookupByUUID(UUID.nameUUIDFromBytes((vmName .getBytes()))); if (attach) { s_logger.debug("Attaching device: " + xml); dm.attachDevice(xml); } else { s_logger.debug("Detaching device: " + xml); dm.detachDevice(xml); } } catch (LibvirtException e) { if (attach) { s_logger.warn("Failed to attach device to " + vmName + ": " + e.getMessage()); } else { s_logger.warn("Failed to detach device from " + vmName + ": " + e.getMessage()); } throw e; } catch (Exception e) { throw new InternalErrorException(e.toString()); } finally { if (dm != null) { try { dm.free(); } catch (LibvirtException l) { } } } return null; } @Override public PingCommand getCurrentStatus(long id) { final HashMap<String, State> newStates = sync(); if (!_can_bridge_firewall) { return new PingRoutingCommand(com.cloud.host.Host.Type.Routing, id, newStates); } else { HashMap<String, Pair<Long, Long>> nwGrpStates = syncNetworkGroups(id); 
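            // bridge firewalling is available, so include per-VM security group rule state in the ping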
return new PingRoutingWithNwGroupsCommand(getType(), id, newStates, nwGrpStates); } } @Override public Type getType() { return Type.Routing; } private Map<String, String> getVersionStrings() { final Script command = new Script(_versionstringpath, _timeout, s_logger); KeyValueInterpreter kvi = new KeyValueInterpreter(); String result = command.execute(kvi); if (result == null) { return kvi.getKeyValues(); } else { return new HashMap<String, String>(1); } } @Override public StartupCommand[] initialize() { Map<String, State> changes = null; synchronized (_vms) { _vms.clear(); changes = sync(); } final List<Object> info = getHostInfo(); final StartupRoutingCommand cmd = new StartupRoutingCommand( (Integer) info.get(0), (Long) info.get(1), (Long) info.get(2), (Long) info.get(4), (String) info.get(3), HypervisorType.KVM, RouterPrivateIpStrategy.HostLocal); cmd.setStateChanges(changes); fillNetworkInformation(cmd); _privateIp = cmd.getPrivateIpAddress(); cmd.getHostDetails().putAll(getVersionStrings()); cmd.setPool(_pool); cmd.setCluster(_clusterId); cmd.setGatewayIpAddress(_localGateway); StartupStorageCommand sscmd = null; try { KVMStoragePool localStoragePool = _storagePoolMgr .createStoragePool(_localStorageUUID, "localhost", -1, _localStoragePath, "", StoragePoolType.Filesystem); com.cloud.agent.api.StoragePoolInfo pi = new com.cloud.agent.api.StoragePoolInfo( localStoragePool.getUuid(), cmd.getPrivateIpAddress(), _localStoragePath, _localStoragePath, StoragePoolType.Filesystem, localStoragePool.getCapacity(), localStoragePool.getUsed()); sscmd = new StartupStorageCommand(); sscmd.setPoolInfo(pi); sscmd.setGuid(pi.getUuid()); sscmd.setDataCenter(_dcId); sscmd.setResourceType(Storage.StorageResourceType.STORAGE_POOL); } catch (CloudRuntimeException e) { } if (sscmd != null) { return new StartupCommand[] { cmd, sscmd }; } else { return new StartupCommand[] { cmd }; } } protected HashMap<String, State> sync() { HashMap<String, State> newStates; HashMap<String, State> oldStates = null; final HashMap<String, State> changes = new HashMap<String, State>(); synchronized (_vms) { newStates = getAllVms(); if (newStates == null) { s_logger.debug("Unable to get the vm states so no state sync at this point."); return changes; } oldStates = new HashMap<String, State>(_vms.size()); oldStates.putAll(_vms); for (final Map.Entry<String, State> entry : newStates.entrySet()) { final String vm = entry.getKey(); State newState = entry.getValue(); final State oldState = oldStates.remove(vm); if (newState == State.Stopped && oldState != State.Stopping && oldState != null && oldState != State.Stopped) { newState = getRealPowerState(vm); } if (s_logger.isTraceEnabled()) { s_logger.trace("VM " + vm + ": libvirt has state " + newState + " and we have state " + (oldState != null ? oldState.toString() : "null")); } if (vm.startsWith("migrating")) { s_logger.debug("Migration detected. 
Skipping"); continue; } if (oldState == null) { _vms.put(vm, newState); s_logger.debug("Detecting a new state but couldn't find a old state so adding it to the changes: " + vm); changes.put(vm, newState); } else if (oldState == State.Starting) { if (newState == State.Running) { _vms.put(vm, newState); } else if (newState == State.Stopped) { s_logger.debug("Ignoring vm " + vm + " because of a lag in starting the vm."); } } else if (oldState == State.Migrating) { if (newState == State.Running) { s_logger.debug("Detected that an migrating VM is now running: " + vm); _vms.put(vm, newState); } } else if (oldState == State.Stopping) { if (newState == State.Stopped) { _vms.put(vm, newState); } else if (newState == State.Running) { s_logger.debug("Ignoring vm " + vm + " because of a lag in stopping the vm. "); } } else if (oldState != newState) { _vms.put(vm, newState); if (newState == State.Stopped) { if (_vmsKilled.remove(vm)) { s_logger.debug("VM " + vm + " has been killed for storage. "); newState = State.Error; } } changes.put(vm, newState); } } for (final Map.Entry<String, State> entry : oldStates.entrySet()) { final String vm = entry.getKey(); final State oldState = entry.getValue(); if (s_logger.isTraceEnabled()) { s_logger.trace("VM " + vm + " is now missing from libvirt so reporting stopped"); } if (oldState == State.Stopping) { s_logger.debug("Ignoring VM " + vm + " in transition state stopping."); _vms.remove(vm); } else if (oldState == State.Starting) { s_logger.debug("Ignoring VM " + vm + " in transition state starting."); } else if (oldState == State.Stopped) { _vms.remove(vm); } else if (oldState == State.Migrating) { s_logger.debug("Ignoring VM " + vm + " in migrating state."); } else { _vms.remove(vm); State state = State.Stopped; if (_vmsKilled.remove(entry.getKey())) { s_logger.debug("VM " + vm + " has been killed by storage monitor"); state = State.Error; } changes.put(entry.getKey(), state); } } } return changes; } protected State getRealPowerState(String vm) { int i = 0; s_logger.trace("Checking on the HALTED State"); Domain dm = null; for (; i < 5; i++) { try { Connect conn = LibvirtConnection.getConnection(); dm = conn.domainLookupByUUID(UUID.nameUUIDFromBytes(vm .getBytes())); DomainInfo.DomainState vps = dm.getInfo().state; if (vps != null && vps != DomainInfo.DomainState.VIR_DOMAIN_SHUTOFF && vps != DomainInfo.DomainState.VIR_DOMAIN_NOSTATE) { return convertToState(vps); } } catch (final LibvirtException e) { s_logger.trace(e.getMessage()); } catch (Exception e) { s_logger.trace(e.getMessage()); } finally { try { if (dm != null) { dm.free(); } } catch (final LibvirtException e) { } } try { Thread.sleep(1000); } catch (InterruptedException e) { } } return State.Stopped; } protected List<String> getAllVmNames(Connect conn) { ArrayList<String> la = new ArrayList<String>(); try { final String names[] = conn.listDefinedDomains(); for (int i = 0; i < names.length; i++) { la.add(names[i]); } } catch (final LibvirtException e) { s_logger.warn("Failed to list Defined domains", e); } int[] ids = null; try { ids = conn.listDomains(); } catch (final LibvirtException e) { s_logger.warn("Failed to list domains", e); return la; } Domain dm = null; for (int i = 0; i < ids.length; i++) { try { dm = conn.domainLookupByID(ids[i]); la.add(dm.getName()); } catch (final LibvirtException e) { s_logger.warn("Unable to get vms", e); } finally { try { if (dm != null) { dm.free(); } } catch (final LibvirtException e) { } } } return la; } private HashMap<String, State> getAllVms() { final 
HashMap<String, State> vmStates = new HashMap<String, State>(); String[] vms = null; int[] ids = null; Connect conn = null; try { conn = LibvirtConnection.getConnection(); } catch (LibvirtException e) { s_logger.debug("Failed to get connection: " + e.getMessage()); return vmStates; } try { ids = conn.listDomains(); } catch (final LibvirtException e) { s_logger.warn("Unable to listDomains", e); return null; } try { vms = conn.listDefinedDomains(); } catch (final LibvirtException e) { s_logger.warn("Unable to listDomains", e); return null; } Domain dm = null; for (int i = 0; i < ids.length; i++) { try { dm = conn.domainLookupByID(ids[i]); DomainInfo.DomainState ps = dm.getInfo().state; final State state = convertToState(ps); s_logger.trace("VM " + dm.getName() + ": powerstate = " + ps + "; vm state=" + state.toString()); String vmName = dm.getName(); vmStates.put(vmName, state); } catch (final LibvirtException e) { s_logger.warn("Unable to get vms", e); } finally { try { if (dm != null) { dm.free(); } } catch (LibvirtException e) { } } } for (int i = 0; i < vms.length; i++) { try { dm = conn.domainLookupByUUID(UUID.nameUUIDFromBytes(vms[i] .getBytes())); DomainInfo.DomainState ps = dm.getInfo().state; final State state = convertToState(ps); String vmName = dm.getName(); s_logger.trace("VM " + vmName + ": powerstate = " + ps + "; vm state=" + state.toString()); vmStates.put(vmName, state); } catch (final LibvirtException e) { s_logger.warn("Unable to get vms", e); } catch (Exception e) { s_logger.warn("Unable to get vms", e); } finally { try { if (dm != null) { dm.free(); } } catch (LibvirtException e) { } } } return vmStates; } protected List<Object> getHostInfo() { final ArrayList<Object> info = new ArrayList<Object>(); long speed = 0; long cpus = 0; long ram = 0; String cap = null; try { Connect conn = LibvirtConnection.getConnection(); final NodeInfo hosts = conn.nodeInfo(); boolean result = false; try { BufferedReader in = new BufferedReader( new FileReader( "/sys/devices/system/cpu/cpu0/cpufreq/cpuinfo_max_freq")); speed = Long.parseLong(in.readLine()) / 1000; result = true; } catch (FileNotFoundException e) { } catch (IOException e) { } catch (NumberFormatException e) { } if (!result) { speed = hosts.mhz; } cpus = hosts.cpus; ram = hosts.memory * 1024L; LibvirtCapXMLParser parser = new LibvirtCapXMLParser(); parser.parseCapabilitiesXML(conn.getCapabilities()); ArrayList<String> oss = parser.getGuestOsType(); for (String s : oss) { /* * Even host supports guest os type more than hvm, we only * report hvm to management server */ if (s.equalsIgnoreCase("hvm")) { cap = "hvm"; } } } catch (LibvirtException e) { } if (isSnapshotSupported()) { cap = cap + ",snapshot"; } info.add((int) cpus); info.add(speed); info.add(ram); info.add(cap); long dom0ram = Math.min(ram / 10, 768 * 1024 * 1024L);// save a maximum // of 10% of // system ram or // 768M dom0ram = Math.max(dom0ram, _dom0MinMem); info.add(dom0ram); s_logger.debug("cpus=" + cpus + ", speed=" + speed + ", ram=" + ram + ", dom0ram=" + dom0ram); return info; } protected void cleanupVM(Connect conn, final String vmName, final String vnet) { s_logger.debug("Trying to cleanup the vnet: " + vnet); if (vnet != null) { cleanupVnet(conn, vnet); } _vmStats.remove(vmName); } protected String rebootVM(Connect conn, String vmName) { Domain dm = null; String msg = null; try { dm = conn.domainLookupByUUID(UUID.nameUUIDFromBytes(vmName .getBytes())); String vmDef = dm.getXMLDesc(0); s_logger.debug(vmDef); msg = stopVM(conn, vmName, 
defineOps.UNDEFINE_VM); msg = startDomain(conn, vmName, vmDef); return null; } catch (LibvirtException e) { s_logger.warn("Failed to create vm", e); msg = e.getMessage(); } catch (Exception e) { s_logger.warn("Failed to create vm", e); msg = e.getMessage(); } finally { try { if (dm != null) { dm.free(); } } catch (LibvirtException e) { } } return msg; } protected String stopVM(Connect conn, String vmName, defineOps df) { DomainInfo.DomainState state = null; Domain dm = null; s_logger.debug("Try to stop the vm at first"); String ret = stopVM(conn, vmName, false); if (ret == Script.ERR_TIMEOUT) { ret = stopVM(conn, vmName, true); } else if (ret != null) { /* * There is a race condition between libvirt and qemu: libvirt * listens on qemu's monitor fd. If qemu is shutdown, while libvirt * is reading on the fd, then libvirt will report an error. */ /* Retry 3 times, to make sure we can get the vm's status */ for (int i = 0; i < 3; i++) { try { dm = conn.domainLookupByUUID(UUID.nameUUIDFromBytes(vmName .getBytes())); state = dm.getInfo().state; break; } catch (LibvirtException e) { s_logger.debug("Failed to get vm status:" + e.getMessage()); } catch (Exception e) { s_logger.debug("Failed to get vm status:" + e.getMessage()); } finally { try { if (dm != null) { dm.free(); } } catch (LibvirtException l) { } } } if (state == null) { s_logger.debug("Can't get vm's status, assume it's dead already"); return null; } if (state != DomainInfo.DomainState.VIR_DOMAIN_SHUTOFF) { s_logger.debug("Try to destroy the vm"); ret = stopVM(conn, vmName, true); if (ret != null) { return ret; } } } if (df == defineOps.UNDEFINE_VM) { try { dm = conn.domainLookupByUUID(UUID.nameUUIDFromBytes(vmName .getBytes())); dm.undefine(); } catch (LibvirtException e) { } finally { try { if (dm != null) { dm.free(); } } catch (LibvirtException l) { } } } return null; } protected String stopVM(Connect conn, String vmName, boolean force) { Domain dm = null; try { dm = conn.domainLookupByUUID(UUID.nameUUIDFromBytes(vmName .getBytes())); if (force) { if (dm.getInfo().state != DomainInfo.DomainState.VIR_DOMAIN_SHUTOFF) { dm.destroy(); } } else { if (dm.getInfo().state == DomainInfo.DomainState.VIR_DOMAIN_SHUTOFF) { return null; } dm.shutdown(); int retry = _stopTimeout / 2000; /* Wait for the domain gets into shutoff state */ while ((dm.getInfo().state != DomainInfo.DomainState.VIR_DOMAIN_SHUTOFF) && (retry >= 0)) { Thread.sleep(2000); retry--; } if (retry < 0) { s_logger.warn("Timed out waiting for domain " + vmName + " to shutdown gracefully"); return Script.ERR_TIMEOUT; } } } catch (LibvirtException e) { s_logger.debug("Failed to stop VM :" + vmName + " :", e); return e.getMessage(); } catch (InterruptedException ie) { s_logger.debug("Interrupted sleep"); return ie.getMessage(); } catch (Exception e) { s_logger.debug("Failed to stop VM :" + vmName + " :", e); return e.getMessage(); } finally { try { if (dm != null) { dm.free(); } } catch (LibvirtException e) { } } return null; } public synchronized String cleanupVnet(Connect conn, final String vnetId) { // VNC proxy VMs do not have vnet if (vnetId == null || vnetId.isEmpty() || isDirectAttachedNetwork(vnetId)) { return null; } final List<String> names = getAllVmNames(conn); if (!names.isEmpty()) { for (final String name : names) { if (VirtualMachineName.getVnet(name).equals(vnetId)) { return null; // Can't remove the vnet yet. 
                }
            }
        }
        final Script command = new Script(_modifyVlanPath, _timeout, s_logger);
        command.add("-o", "delete");
        command.add("-v", vnetId);
        return command.execute();
    }

    protected Integer getVncPort(Connect conn, String vmName)
            throws LibvirtException {
        LibvirtDomainXMLParser parser = new LibvirtDomainXMLParser();
        Domain dm = null;
        try {
            dm = conn.domainLookupByUUID(UUID.nameUUIDFromBytes(vmName
                    .getBytes()));
            String xmlDesc = dm.getXMLDesc(0);
            parser.parseDomainXML(xmlDesc);
            return parser.getVncPort();
        } finally {
            try {
                if (dm != null) {
                    dm.free();
                }
            } catch (LibvirtException l) {
            }
        }
    }

    private boolean IsHVMEnabled(Connect conn) {
        LibvirtCapXMLParser parser = new LibvirtCapXMLParser();
        try {
            parser.parseCapabilitiesXML(conn.getCapabilities());
            ArrayList<String> osTypes = parser.getGuestOsType();
            for (String o : osTypes) {
                if (o.equalsIgnoreCase("hvm")) {
                    return true;
                }
            }
        } catch (LibvirtException e) {
        }
        return false;
    }

    private String getHypervisorPath(Connect conn) {
        LibvirtCapXMLParser parser = new LibvirtCapXMLParser();
        try {
            parser.parseCapabilitiesXML(conn.getCapabilities());
        } catch (LibvirtException e) {
            s_logger.debug(e.getMessage());
        }
        return parser.getEmulator();
    }

    private String getGuestType(Connect conn, String vmName) {
        LibvirtDomainXMLParser parser = new LibvirtDomainXMLParser();
        Domain dm = null;
        try {
            dm = conn.domainLookupByUUID(UUID.nameUUIDFromBytes(vmName
                    .getBytes()));
            String xmlDesc = dm.getXMLDesc(0);
            parser.parseDomainXML(xmlDesc);
            return parser.getDescription();
        } catch (LibvirtException e) {
            return null;
        } catch (Exception e) {
            return null;
        } finally {
            try {
                if (dm != null) {
                    dm.free();
                }
            } catch (LibvirtException l) {
            }
        }
    }

    boolean isGuestPVEnabled(String guestOS) {
        if (guestOS == null) {
            return false;
        }
        String guestOSName = KVMGuestOsMapper.getGuestOsName(guestOS);
        if (guestOS.startsWith("Ubuntu")
                || guestOSName.startsWith("Fedora 13")
                || guestOSName.startsWith("Fedora 12")
                || guestOSName.startsWith("Fedora 11")
                || guestOSName.startsWith("Fedora 10")
                || guestOSName.startsWith("Fedora 9")
                || guestOSName.startsWith("CentOS 5.3")
                || guestOSName.startsWith("CentOS 5.4")
                || guestOSName.startsWith("CentOS 5.5")
                || guestOS.startsWith("CentOS")
                || guestOS.startsWith("Fedora")
                || guestOSName.startsWith("Red Hat Enterprise Linux 5.3")
                || guestOSName.startsWith("Red Hat Enterprise Linux 5.4")
                || guestOSName.startsWith("Red Hat Enterprise Linux 5.5")
                || guestOSName.startsWith("Red Hat Enterprise Linux 6")
                || guestOS.startsWith("Debian GNU/Linux")
                || guestOSName.startsWith("Other PV")) {
            return true;
        } else {
            return false;
        }
    }

    public boolean isCentosHost() {
        if (_hvVersion <= 9) {
            return true;
        } else {
            return false;
        }
    }

    private InterfaceDef.nicModel getGuestNicModel(String guestOSType) {
        if (isGuestPVEnabled(guestOSType)) {
            return InterfaceDef.nicModel.VIRTIO;
        } else {
            return InterfaceDef.nicModel.E1000;
        }
    }

    private DiskDef.diskBus getGuestDiskModel(String guestOSType) {
        if (isGuestPVEnabled(guestOSType)) {
            return DiskDef.diskBus.VIRTIO;
        } else {
            return DiskDef.diskBus.IDE;
        }
    }

    private String getVnetIdFromBrName(String vnetBrName) {
        if (vnetBrName.contains("cloudVirBr")) {
            return vnetBrName.replaceAll("cloudVirBr", "");
        } else {
            Pattern r = Pattern.compile("-(\\d+)$");
            Matcher m = r.matcher(vnetBrName);
            // m.group() may only be called after a successful match, so guard with find()
            if (m.find()) {
                return m.group(1);
            } else {
                s_logger.debug("unable to get a vlan ID from name " + vnetBrName);
                return "";
            }
        }
    }

    private void cleanupVMNetworks(Connect conn, List<InterfaceDef> nics) {
        for (InterfaceDef nic : nics) {
            if
(nic.getHostNetType() == hostNicType.VNET) { cleanupVnet(conn, getVnetIdFromBrName(nic.getBrName())); } } } private Domain getDomain(Connect conn, String vmName) throws LibvirtException { return conn .domainLookupByUUID(UUID.nameUUIDFromBytes(vmName.getBytes())); } protected List<InterfaceDef> getInterfaces(Connect conn, String vmName) { LibvirtDomainXMLParser parser = new LibvirtDomainXMLParser(); Domain dm = null; try { dm = conn.domainLookupByUUID(UUID.nameUUIDFromBytes(vmName .getBytes())); parser.parseDomainXML(dm.getXMLDesc(0)); return parser.getInterfaces(); } catch (LibvirtException e) { s_logger.debug("Failed to get dom xml: " + e.toString()); return new ArrayList<InterfaceDef>(); } catch (Exception e) { s_logger.debug("Failed to get dom xml: " + e.toString()); return new ArrayList<InterfaceDef>(); } finally { try { if (dm != null) { dm.free(); } } catch (LibvirtException e) { } } } protected List<DiskDef> getDisks(Connect conn, String vmName) { LibvirtDomainXMLParser parser = new LibvirtDomainXMLParser(); Domain dm = null; try { dm = conn.domainLookupByUUID(UUID.nameUUIDFromBytes(vmName .getBytes())); parser.parseDomainXML(dm.getXMLDesc(0)); return parser.getDisks(); } catch (LibvirtException e) { s_logger.debug("Failed to get dom xml: " + e.toString()); return new ArrayList<DiskDef>(); } catch (Exception e) { s_logger.debug("Failed to get dom xml: " + e.toString()); return new ArrayList<DiskDef>(); } finally { try { if (dm != null) { dm.free(); } } catch (LibvirtException e) { } } } private String executeBashScript(String script) { Script command = new Script("/bin/bash", _timeout, s_logger); command.add("-c"); command.add(script); return command.execute(); } private String executeBashScript(String script, OutputInterpreter parser) { Script command = new Script("/bin/bash", _timeout, s_logger); command.add("-c"); command.add(script); return command.execute(parser); } private void deletExitingLinkLocalRoutTable(String linkLocalBr) { Script command = new Script("/bin/bash", _timeout); command.add("-c"); command.add("ip route | grep " + NetUtils.getLinkLocalCIDR()); OutputInterpreter.AllLinesParser parser = new OutputInterpreter.AllLinesParser(); String result = command.execute(parser); boolean foundLinkLocalBr = false; if (result == null && parser.getLines() != null) { String[] lines = parser.getLines().split("\\n"); for (String line : lines) { String[] tokens = line.split(" "); if (!tokens[2].equalsIgnoreCase(linkLocalBr)) { Script.runSimpleBashScript("ip route del " + NetUtils.getLinkLocalCIDR()); } else { foundLinkLocalBr = true; } } } if (!foundLinkLocalBr) { Script.runSimpleBashScript("ip route add " + NetUtils.getLinkLocalCIDR() + " dev " + linkLocalBr + " src " + NetUtils.getLinkLocalGateway()); } } private class vmStats { long _usedTime; long _tx; long _rx; Calendar _timestamp; } private VmStatsEntry getVmStat(Connect conn, String vmName) throws LibvirtException { Domain dm = null; try { dm = getDomain(conn, vmName); DomainInfo info = dm.getInfo(); VmStatsEntry stats = new VmStatsEntry(); stats.setNumCPUs(info.nrVirtCpu); stats.setEntityType("vm"); /* get cpu utilization */ vmStats oldStats = null; Calendar now = Calendar.getInstance(); oldStats = _vmStats.get(vmName); long elapsedTime = 0; if (oldStats != null) { elapsedTime = now.getTimeInMillis() - oldStats._timestamp.getTimeInMillis(); double utilization = (info.cpuTime - oldStats._usedTime) / ((double) elapsedTime * 1000000); NodeInfo node = conn.nodeInfo(); utilization = utilization / node.cpus; if(utilization > 0){ 
stats.setCPUUtilization(utilization * 100); } } /* get network stats */ List<InterfaceDef> vifs = getInterfaces(conn, vmName); long rx = 0; long tx = 0; for (InterfaceDef vif : vifs) { DomainInterfaceStats ifStats = dm.interfaceStats(vif .getDevName()); rx += ifStats.rx_bytes; tx += ifStats.tx_bytes; } if (oldStats != null) { long deltarx = rx - oldStats._rx; if (deltarx > 0) stats.setNetworkReadKBs(deltarx / 1000); long deltatx = tx - oldStats._tx; if (deltatx > 0) stats.setNetworkWriteKBs(deltatx / 1000); } vmStats newStat = new vmStats(); newStat._usedTime = info.cpuTime; newStat._rx = rx; newStat._tx = tx; newStat._timestamp = now; _vmStats.put(vmName, newStat); return stats; } finally { if (dm != null) { dm.free(); } } } private boolean can_bridge_firewall(String prvNic) { Script cmd = new Script(_securityGroupPath, _timeout, s_logger); cmd.add("can_bridge_firewall"); cmd.add(prvNic); String result = cmd.execute(); if (result != null) { return false; } return true; } protected boolean destroy_network_rules_for_vm(Connect conn, String vmName) { if (!_can_bridge_firewall) { return false; } String vif = null; List<InterfaceDef> intfs = getInterfaces(conn, vmName); if (intfs.size() > 0) { InterfaceDef intf = intfs.get(0); vif = intf.getDevName(); } Script cmd = new Script(_securityGroupPath, _timeout, s_logger); cmd.add("destroy_network_rules_for_vm"); cmd.add("--vmname", vmName); if (vif != null) { cmd.add("--vif", vif); } String result = cmd.execute(); if (result != null) { return false; } return true; } protected boolean default_network_rules(Connect conn, String vmName, NicTO nic, Long vmId) { if (!_can_bridge_firewall) { return false; } List<InterfaceDef> intfs = getInterfaces(conn, vmName); if (intfs.size() < nic.getDeviceId()) { return false; } InterfaceDef intf = intfs.get(nic.getDeviceId()); String brname = intf.getBrName(); String vif = intf.getDevName(); Script cmd = new Script(_securityGroupPath, _timeout, s_logger); cmd.add("default_network_rules"); cmd.add("--vmname", vmName); cmd.add("--vmid", vmId.toString()); if (nic.getIp() != null) { cmd.add("--vmip", nic.getIp()); } cmd.add("--vmmac", nic.getMac()); cmd.add("--vif", vif); cmd.add("--brname", brname); String result = cmd.execute(); if (result != null) { return false; } return true; } protected boolean post_default_network_rules(Connect conn, String vmName, NicTO nic, Long vmId, InetAddress dhcpServerIp, String hostIp, String hostMacAddr) { if (!_can_bridge_firewall) { return false; } List<InterfaceDef> intfs = getInterfaces(conn, vmName); if (intfs.size() < nic.getDeviceId()) { return false; } InterfaceDef intf = intfs.get(nic.getDeviceId()); String brname = intf.getBrName(); String vif = intf.getDevName(); Script cmd = new Script(_securityGroupPath, _timeout, s_logger); cmd.add("post_default_network_rules"); cmd.add("--vmname", vmName); cmd.add("--vmid", vmId.toString()); cmd.add("--vmip", nic.getIp()); cmd.add("--vmmac", nic.getMac()); cmd.add("--vif", vif); cmd.add("--brname", brname); if (dhcpServerIp != null) cmd.add("--dhcpSvr", dhcpServerIp.getHostAddress()); cmd.add("--hostIp", hostIp); cmd.add("--hostMacAddr", hostMacAddr); String result = cmd.execute(); if (result != null) { return false; } return true; } protected boolean default_network_rules_for_systemvm(Connect conn, String vmName) { if (!_can_bridge_firewall) { return false; } Script cmd = new Script(_securityGroupPath, _timeout, s_logger); cmd.add("default_network_rules_systemvm"); cmd.add("--vmname", vmName); cmd.add("--localbrname", 
_linkLocalBridgeName); String result = cmd.execute(); if (result != null) { return false; } return true; } private boolean add_network_rules(String vmName, String vmId, String guestIP, String sig, String seq, String mac, String rules, String vif, String brname) { if (!_can_bridge_firewall) { return false; } String newRules = rules.replace(" ", ";"); Script cmd = new Script(_securityGroupPath, _timeout, s_logger); cmd.add("add_network_rules"); cmd.add("--vmname", vmName); cmd.add("--vmid", vmId); cmd.add("--vmip", guestIP); cmd.add("--sig", sig); cmd.add("--seq", seq); cmd.add("--vmmac", mac); cmd.add("--vif", vif); cmd.add("--brname", brname); if (rules != null) { cmd.add("--rules", newRules); } String result = cmd.execute(); if (result != null) { return false; } return true; } private boolean cleanup_rules() { if (!_can_bridge_firewall) { return false; } Script cmd = new Script(_securityGroupPath, _timeout, s_logger); cmd.add("cleanup_rules"); String result = cmd.execute(); if (result != null) { return false; } return true; } private String get_rule_logs_for_vms() { Script cmd = new Script(_securityGroupPath, _timeout, s_logger); cmd.add("get_rule_logs_for_vms"); OutputInterpreter.OneLineParser parser = new OutputInterpreter.OneLineParser(); String result = cmd.execute(parser); if (result == null) { return parser.getLine(); } return null; } private HashMap<String, Pair<Long, Long>> syncNetworkGroups(long id) { HashMap<String, Pair<Long, Long>> states = new HashMap<String, Pair<Long, Long>>(); String result = get_rule_logs_for_vms(); s_logger.trace("syncNetworkGroups: id=" + id + " got: " + result); String[] rulelogs = result != null ? result.split(";") : new String[0]; for (String rulesforvm : rulelogs) { String[] log = rulesforvm.split(","); if (log.length != 6) { continue; } try { states.put(log[0], new Pair<Long, Long>(Long.parseLong(log[1]), Long.parseLong(log[5]))); } catch (NumberFormatException nfe) { states.put(log[0], new Pair<Long, Long>(-1L, -1L)); } } return states; } /* online snapshot supported by enhanced qemu-kvm */ private boolean isSnapshotSupported() { String result = executeBashScript("qemu-img --help|grep convert"); if (result != null) { return false; } else { return true; } } private Pair<Double, Double> getNicStats(String nicName) { double rx = 0.0; String rxFile = "/sys/class/net/" + nicName + "/statistics/rx_bytes"; String rxContent = FileUtil.readFileAsString(rxFile); if (rxContent == null) { s_logger.warn("Failed to read the rx_bytes for " + nicName + " from " + rxFile); } rx = Double.parseDouble(rxContent); double tx = 0.0; String txFile = "/sys/class/net/" + nicName + "/statistics/tx_bytes"; String txContent = FileUtil.readFileAsString(txFile); if (txContent == null) { s_logger.warn("Failed to read the tx_bytes for " + nicName + " from " + txFile); } tx = Double.parseDouble(txContent); return new Pair<Double, Double>(rx, tx); } private Answer execute(NetworkRulesSystemVmCommand cmd) { boolean success = false; Connect conn; try { conn = LibvirtConnection.getConnection(); success = default_network_rules_for_systemvm(conn, cmd.getVmName()); } catch (LibvirtException e) { // TODO Auto-generated catch block e.printStackTrace(); } return new Answer(cmd, success, ""); } @Override public void setName(String name) { // TODO Auto-generated method stub } @Override public void setConfigParams(Map<String, Object> params) { // TODO Auto-generated method stub } @Override public Map<String, Object> getConfigParams() { // TODO Auto-generated method stub return null; } 
@Override public int getRunLevel() { // TODO Auto-generated method stub return 0; } @Override public void setRunLevel(int level) { // TODO Auto-generated method stub } }
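A note on the CPU accounting in getVmStat() in the file above: libvirt's DomainInfo.cpuTime is cumulative guest CPU time in nanoseconds, while the sampling interval is measured in milliseconds, so the code multiplies the elapsed time by 1,000,000 before dividing and then normalizes by the host CPU count. A minimal sketch of that arithmetic, with hypothetical parameter names (an illustration of the formula, not the class's actual helper):

// Sketch: percent CPU utilization from two libvirt samples, mirroring getVmStat().
// cpuTimeNs / prevCpuTimeNs: cumulative guest CPU time in nanoseconds (DomainInfo.cpuTime)
// elapsedMs: wall-clock milliseconds between the two samples
// hostCpus: number of host CPUs (NodeInfo.cpus)
public final class CpuUtilSketch {
    static double cpuUtilizationPercent(long cpuTimeNs, long prevCpuTimeNs,
                                        long elapsedMs, int hostCpus) {
        double elapsedNs = (double) elapsedMs * 1000000; // ms -> ns
        double utilization = (cpuTimeNs - prevCpuTimeNs) / elapsedNs / hostCpus;
        return utilization > 0 ? utilization * 100 : 0;  // getVmStat() only records positive values
    }

    public static void main(String[] args) {
        // 2 seconds of guest CPU time over a 10 second window on a 4-CPU host -> 5.0
        System.out.println(cpuUtilizationPercent(2000000000L, 0L, 10000L, 4));
    }
}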
agent: Do not define domains persistent in libvirt We used to define domains as persistent in libvirt, which caused their XML definitions to remain there after a reboot of the hypervisor. We never do anything with those already-defined domains; in fact, we wipe all defined domains when starting the agent. Some users, however, reported that libvirt started these domains after a reboot, before the CloudStack agent itself was started. By starting domains directly from the XML description instead of defining them, we prevent them from ever being stored in libvirt.
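A minimal sketch of the approach described above, using the libvirt Java bindings (the helper and class names are assumptions for illustration, not the agent's actual code): domainCreateXML() boots the guest directly from the XML the agent already has, without ever defining it, so nothing is persisted in libvirt.

import org.libvirt.Connect;
import org.libvirt.Domain;
import org.libvirt.LibvirtException;

// Sketch: start a guest as a transient domain, as the message above describes.
public final class TransientDomainStarter {
    // domainXML is the full guest definition the agent already receives.
    public static void startTransient(Connect conn, String domainXML) throws LibvirtException {
        Domain dm = null;
        try {
            // domainCreateXML() boots the guest without defining it, so no
            // definition survives in libvirt after a hypervisor reboot.
            dm = conn.domainCreateXML(domainXML, 0);
        } finally {
            if (dm != null) {
                dm.free(); // drop the handle; the running guest is unaffected
            }
        }
    }
}

The running guest can still be looked up by UUID while it is alive; it simply disappears from libvirt once it is destroyed or the host reboots.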
plugins/hypervisors/kvm/src/com/cloud/hypervisor/kvm/resource/LibvirtComputingResource.java
agent: Do not define domains persistent in libvirt
<ide><path>lugins/hypervisors/kvm/src/com/cloud/hypervisor/kvm/resource/LibvirtComputingResource.java <ide> NATIVE, OPENVSWITCH <ide> } <ide> <del> protected enum defineOps { <del> UNDEFINE_VM, DEFINE_VM <del> } <del> <ide> protected BridgeType _bridgeType; <ide> <ide> private String getEndIpFromStartIp(String startIp, int numIps) { <ide> <ide> protected String startDomain(Connect conn, String vmName, String domainXML) <ide> throws LibvirtException, InternalErrorException { <del> /* No duplicated vm, we will success, or failed */ <del> boolean failed = false; <ide> Domain dm = null; <ide> try { <del> dm = conn.domainDefineXML(domainXML); <add> /* <add> We create a transient domain here. When this method gets <add> called we receive a full XML specification of the guest, <add> so no need to define it persistent. <add> <add> This also makes sure we never have any old "garbage" defined <add> in libvirt which might haunt us. <add> */ <add> dm = conn.domainCreateXML(domainXML, 0); <ide> } catch (final LibvirtException e) { <del> /* Duplicated defined vm */ <del> s_logger.warn("Failed to define domain " + vmName + ": " <add> s_logger.warn("Failed to start domain " + vmName + ": " <ide> + e.getMessage()); <del> failed = true; <del> } finally { <del> try { <del> if (dm != null) { <del> dm.free(); <del> } <del> } catch (final LibvirtException e) { <del> <del> } <del> } <del> <del> /* If failed, undefine the vm */ <del> Domain dmOld = null; <del> Domain dmNew = null; <del> try { <del> if (failed) { <del> dmOld = conn.domainLookupByUUID(UUID.nameUUIDFromBytes(vmName <del> .getBytes())); <del> dmOld.undefine(); <del> dmNew = conn.domainDefineXML(domainXML); <del> } <del> } catch (final LibvirtException e) { <del> s_logger.warn("Failed to define domain (second time) " + vmName <del> + ": " + e.getMessage()); <del> throw e; <del> } catch (Exception e) { <del> s_logger.warn("Failed to define domain (second time) " + vmName <del> + ": " + e.getMessage()); <del> throw new InternalErrorException(e.toString()); <del> } finally { <del> try { <del> if (dmOld != null) { <del> dmOld.free(); <del> } <del> if (dmNew != null) { <del> dmNew.free(); <del> } <del> } catch (final LibvirtException e) { <del> <del> } <del> } <del> <del> /* Start the VM */ <del> try { <del> dm = conn.domainLookupByUUID(UUID.nameUUIDFromBytes(vmName <del> .getBytes())); <del> dm.create(); <del> } catch (LibvirtException e) { <del> s_logger.warn("Failed to start domain: " + vmName + ": " <del> + e.getMessage()); <del> throw e; <del> } finally { <del> try { <del> if (dm != null) { <del> dm.free(); <del> } <del> } catch (final LibvirtException e) { <del> <del> } <del> } <add> } <add> <ide> return null; <ide> } <ide> <ide> List<InterfaceDef> ifaces = getInterfaces(conn, vmName); <ide> <ide> destroy_network_rules_for_vm(conn, vmName); <del> String result = stopVM(conn, vmName, defineOps.UNDEFINE_VM); <add> String result = stopVM(conn, vmName); <ide> if (result == null) { <ide> for (DiskDef disk : disks) { <ide> if (disk.getDeviceType() == DiskDef.deviceType.CDROM <ide> .getBytes())); <ide> String vmDef = dm.getXMLDesc(0); <ide> s_logger.debug(vmDef); <del> msg = stopVM(conn, vmName, defineOps.UNDEFINE_VM); <add> msg = stopVM(conn, vmName); <ide> msg = startDomain(conn, vmName, vmDef); <ide> return null; <ide> } catch (LibvirtException e) { <ide> return msg; <ide> } <ide> <del> protected String stopVM(Connect conn, String vmName, defineOps df) { <add> protected String stopVM(Connect conn, String vmName) { <ide> DomainInfo.DomainState state = 
null; <ide> Domain dm = null; <ide> <ide> } <ide> } <ide> <del> if (df == defineOps.UNDEFINE_VM) { <del> try { <del> dm = conn.domainLookupByUUID(UUID.nameUUIDFromBytes(vmName <del> .getBytes())); <del> dm.undefine(); <del> } catch (LibvirtException e) { <del> <del> } finally { <del> try { <del> if (dm != null) { <del> dm.free(); <del> } <del> } catch (LibvirtException l) { <del> <del> } <del> } <del> } <ide> return null; <ide> } <ide>
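As the diff above shows, stopVM() also loses its defineOps parameter, because a transient domain has nothing to undefine. A minimal sketch of the corresponding cleanup, reusing the lookup-by-UUID convention from the surrounding code (the class and method names are illustrative only):

import java.util.UUID;
import org.libvirt.Connect;
import org.libvirt.Domain;
import org.libvirt.LibvirtException;

// Sketch: stop a transient domain; there is no longer an undefine step.
public final class TransientDomainStopper {
    public static void stopTransient(Connect conn, String vmName) throws LibvirtException {
        Domain dm = null;
        try {
            dm = conn.domainLookupByUUID(UUID.nameUUIDFromBytes(vmName.getBytes()));
            // destroy() stops the guest; because it was created with domainCreateXML(),
            // no persistent definition is left behind to undefine.
            dm.destroy();
        } finally {
            if (dm != null) {
                dm.free();
            }
        }
    }
}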
Java
agpl-3.0
7b04e7602f0c73376f7817eb4581f8ac72d2d95a
0
LibrePlan/libreplan,skylow95/libreplan,Marine-22/libre,dgray16/libreplan,dgray16/libreplan,poum/libreplan,poum/libreplan,poum/libreplan,LibrePlan/libreplan,PaulLuchyn/libreplan,dgray16/libreplan,skylow95/libreplan,LibrePlan/libreplan,Marine-22/libre,skylow95/libreplan,LibrePlan/libreplan,Marine-22/libre,skylow95/libreplan,LibrePlan/libreplan,Marine-22/libre,LibrePlan/libreplan,PaulLuchyn/libreplan,dgray16/libreplan,PaulLuchyn/libreplan,poum/libreplan,skylow95/libreplan,dgray16/libreplan,PaulLuchyn/libreplan,Marine-22/libre,Marine-22/libre,PaulLuchyn/libreplan,LibrePlan/libreplan,poum/libreplan,PaulLuchyn/libreplan,dgray16/libreplan,PaulLuchyn/libreplan,skylow95/libreplan,poum/libreplan,dgray16/libreplan
/* * This file is part of NavalPlan * * Copyright (C) 2009-2010 Fundación para o Fomento da Calidade Industrial e * Desenvolvemento Tecnolóxico de Galicia * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package org.navalplanner.web.orders; import static org.navalplanner.web.I18nHelper._; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.Date; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import javax.annotation.Resource; import org.apache.commons.lang.StringUtils; import org.navalplanner.business.orders.entities.Order; import org.navalplanner.business.orders.entities.OrderElement; import org.navalplanner.business.orders.entities.OrderLine; import org.navalplanner.business.orders.entities.OrderLineGroup; import org.navalplanner.business.orders.entities.SchedulingState; import org.navalplanner.business.requirements.entities.CriterionRequirement; import org.navalplanner.business.templates.entities.OrderElementTemplate; import org.navalplanner.web.common.IMessagesForUser; import org.navalplanner.web.common.Level; import org.navalplanner.web.common.Util; import org.navalplanner.web.common.Util.Getter; import org.navalplanner.web.common.Util.Setter; import org.navalplanner.web.common.components.bandboxsearch.BandboxMultipleSearch; import org.navalplanner.web.common.components.bandboxsearch.BandboxSearch; import org.navalplanner.web.common.components.finders.FilterPair; import org.navalplanner.web.orders.assigntemplates.TemplateFinderPopup; import org.navalplanner.web.orders.assigntemplates.TemplateFinderPopup.IOnResult; import org.navalplanner.web.templates.IOrderTemplatesControllerEntryPoints; import org.navalplanner.web.tree.TreeController; import org.zkoss.ganttz.IPredicate; import org.zkoss.ganttz.util.ComponentsFinder; import org.zkoss.zk.ui.Component; import org.zkoss.zk.ui.Executions; import org.zkoss.zk.ui.WrongValueException; import org.zkoss.zk.ui.event.Event; import org.zkoss.zk.ui.event.EventListener; import org.zkoss.zk.ui.event.KeyEvent; import org.zkoss.zul.Button; import org.zkoss.zul.Constraint; import org.zkoss.zul.Datebox; import org.zkoss.zul.Intbox; import org.zkoss.zul.Messagebox; import org.zkoss.zul.Tab; import org.zkoss.zul.Textbox; import org.zkoss.zul.Treechildren; import org.zkoss.zul.Treeitem; import org.zkoss.zul.Vbox; import org.zkoss.zul.api.Treecell; import org.zkoss.zul.api.Treerow; import org.zkoss.zul.impl.api.InputElement; /** * Controller for {@link OrderElement} tree view of {@link Order} entities <br /> * @author Lorenzo Tilve Álvaro <[email protected]> * @author Manuel Rego Casasnovas <[email protected]> * @author Susana Montes Pedreira <[email protected]> */ public class OrderElementTreeController extends TreeController<OrderElement> { private Vbox orderElementFilter; private BandboxMultipleSearch bdFiltersOrderElement; private 
Datebox filterStartDateOrderElement; private Datebox filterFinishDateOrderElement; private Textbox filterNameOrderElement; private OrderElementTreeitemRenderer renderer = new OrderElementTreeitemRenderer(); private final IOrderModel orderModel; private final OrderElementController orderElementController; private transient IPredicate predicate; @Resource private IOrderTemplatesControllerEntryPoints orderTemplates; private final IMessagesForUser messagesForUser; public List<org.navalplanner.business.labels.entities.Label> getLabels() { return orderModel.getLabels(); } @Override public OrderElementTreeitemRenderer getRenderer() { return renderer; } public OrderElementTreeController(IOrderModel orderModel, OrderElementController orderElementController, IMessagesForUser messagesForUser) { super(OrderElement.class); this.orderModel = orderModel; this.orderElementController = orderElementController; this.messagesForUser = messagesForUser; } public OrderElementController getOrderElementController() { return orderElementController; } @Override protected OrderElementTreeModel getModel() { return orderModel.getOrderElementTreeModel(); } public void createTemplateFromSelectedOrderElement() { if (tree.getSelectedCount() == 1) { createTemplate(getSelectedNode()); } else { showSelectAnElementMessageBox(); } } public void editSelectedOrderElement() { if (tree.getSelectedCount() == 1) { showEditionOrderElement(tree.getSelectedItem()); } else { showSelectAnElementMessageBox(); } } public void moveSelectedOrderElementUp() { if (tree.getSelectedCount() == 1) { Treeitem item = tree.getSelectedItem(); up((OrderElement)item.getValue()); Treeitem brother = (Treeitem) item.getPreviousSibling(); if (brother != null) { brother.setSelected(true); } } else { showSelectAnElementMessageBox(); } } public void moveSelectedOrderElementDown() { if (tree.getSelectedCount() == 1) { Treeitem item = tree.getSelectedItem(); down((OrderElement)item.getValue()); Treeitem brother = (Treeitem) item.getNextSibling(); if (brother != null) { brother.setSelected(true); } } else { showSelectAnElementMessageBox(); } } public void indentSelectedOrderElement() { if (tree.getSelectedCount() == 1) { indent(getSelectedNode()); } else { showSelectAnElementMessageBox(); } } public void unindentSelectedOrderElement() { if (tree.getSelectedCount() == 1) { unindent(getSelectedNode()); } else { showSelectAnElementMessageBox(); } } public void deleteSelectedOrderElement() { if (tree.getSelectedCount() == 1) { remove(getSelectedNode()); } else { showSelectAnElementMessageBox(); } } private void showSelectAnElementMessageBox() { try { Messagebox.show(_("Choose a task " + "to operate on it")); } catch (InterruptedException e) { throw new RuntimeException(e); } } private boolean isTemplateCreationConfirmed() { try { int status = Messagebox .show( _("Still not saved changes would be lost." 
+ " Are you sure you want to go to create a template?"), "Confirm", Messagebox.YES | Messagebox.NO, Messagebox.QUESTION); return Messagebox.YES == status; } catch (InterruptedException e) { throw new RuntimeException(e); } } public void createFromTemplate() { templateFinderPopup.openForSubElemenetCreation(tree, "after_pointer", new IOnResult<OrderElementTemplate>() { @Override public void found(OrderElementTemplate template) { OrderLineGroup parent = (OrderLineGroup) getModel() .getRoot(); orderModel.createFrom(parent, template); getModel().addNewlyAddedChildrenOf(parent); } }); } private void createTemplate(OrderElement selectedNode) { if (!isTemplateCreationConfirmed()) { return; } if (!selectedNode.isNewObject()) { orderTemplates.goToCreateTemplateFrom(selectedNode); } else { notifyTemplateCantBeCreated(); } } private void notifyTemplateCantBeCreated() { try { Messagebox .show( _("Templates can only be created from already existent tasks.\n" + "Newly tasks cannot be used."), _("Operation cannot be done"), Messagebox.OK, Messagebox.INFORMATION); } catch (InterruptedException e) { throw new RuntimeException(e); } } private void notifyDateboxCantBeCreated(final String dateboxName, final String codeOrderElement) { try { Messagebox.show(_("the " + dateboxName + "datebox of the task " + codeOrderElement + " could not be created.\n"), _("Operation cannot be done"), Messagebox.OK, Messagebox.INFORMATION); } catch (InterruptedException e) { } } protected void filterByPredicateIfAny() { if (predicate != null) { filterByPredicate(); } } private void filterByPredicate() { OrderElementTreeModel orderElementTreeModel = orderModel .getOrderElementsFilteredByPredicate(predicate); tree.setModel(orderElementTreeModel.asTree()); tree.invalidate(); } void doEditFor(Order order) { Util.reloadBindings(tree); } public void disabledCodeBoxes(boolean disabled) { Set<Treeitem> childrenSet = new HashSet<Treeitem>(); Treechildren treeChildren = tree.getTreechildren(); if (treeChildren != null) { childrenSet.addAll((Collection<Treeitem>) treeChildren.getItems()); } for (Treeitem each : childrenSet) { disableCodeBoxes(each, disabled); } } private void disableCodeBoxes(Treeitem item, boolean disabled) { Treerow row = item.getTreerow(); InputElement codeBox = (InputElement) ((Treecell) row.getChildren() .get(1)).getChildren().get(0); codeBox.setDisabled(disabled); codeBox.invalidate(); Set<Treeitem> childrenSet = new HashSet<Treeitem>(); Treechildren children = item.getTreechildren(); if (children != null) { childrenSet.addAll((Collection<Treeitem>) children.getItems()); } for (Treeitem each : childrenSet) { disableCodeBoxes(each, disabled); } } @Override public void doAfterCompose(Component comp) throws Exception { super.doAfterCompose(comp); orderElementFilter.getChildren().clear(); appendExpandCollapseButton(); // Configuration of the order elements filter Component filterComponent = Executions.createComponents( "/orders/_orderElementTreeFilter.zul", orderElementFilter, new HashMap<String, String>()); filterComponent.setVariable("treeController", this, true); bdFiltersOrderElement = (BandboxMultipleSearch) filterComponent .getFellow("bdFiltersOrderElement"); filterStartDateOrderElement = (Datebox) filterComponent .getFellow("filterStartDateOrderElement"); filterFinishDateOrderElement = (Datebox) filterComponent .getFellow("filterFinishDateOrderElement"); filterNameOrderElement = (Textbox) filterComponent .getFellow("filterNameOrderElement"); templateFinderPopup = (TemplateFinderPopup) comp 
.getFellow("templateFinderPopupAtTree"); } private void appendExpandCollapseButton() { List<Component> children = orderElementFilter.getParent().getChildren(); // Is already added? Button button = (Button) ComponentsFinder.findById("expandAllButton", children); if (button != null) { return; } // Append expand/collapse button final Button expandAllButton = new Button(); expandAllButton.setId("expandAllButton"); expandAllButton.setClass("planner-command"); expandAllButton.setTooltiptext(_("Expand/Collapse all")); expandAllButton.setImage("/common/img/ico_expand.png"); expandAllButton.addEventListener("onClick", new EventListener() { @Override public void onEvent(Event event) throws Exception { if (expandAllButton.getSclass().equals("planner-command")) { expandAll(); expandAllButton.setSclass("planner-command clicked"); } else { collapseAll(); expandAllButton.setSclass("planner-command"); } } }); children.add(expandAllButton); } public void expandAll() { Set<Treeitem> childrenSet = new HashSet<Treeitem>(); Treechildren children = tree.getTreechildren(); if(children != null) { childrenSet.addAll((Collection<Treeitem>) children.getItems()); } for(Treeitem each: childrenSet) { expandAll(each); } } private void expandAll(Treeitem item) { item.setOpen(true); Set<Treeitem> childrenSet = new HashSet<Treeitem>(); Treechildren children = item.getTreechildren(); if(children != null) { childrenSet.addAll((Collection<Treeitem>) children.getItems()); } for(Treeitem each: childrenSet) { expandAll(each); } } public void collapseAll() { Treechildren children = tree.getTreechildren(); for(Treeitem each: (Collection<Treeitem>) children.getItems()) { each.setOpen(false); } } private enum Navigation { LEFT, UP, RIGHT, DOWN; public static Navigation getIntentFrom(KeyEvent keyEvent) { return values()[keyEvent.getKeyCode() - 37]; } } private Map<OrderElement, Textbox> orderElementCodeTextboxes = new HashMap<OrderElement, Textbox>(); public Map<OrderElement, Textbox> getOrderElementCodeTextboxes() { return orderElementCodeTextboxes; } public class OrderElementTreeitemRenderer extends Renderer { private class KeyboardNavigationHandler { private Map<Treerow, List<InputElement>> navigableElementsByRow = new HashMap<Treerow, List<InputElement>>(); void register(final InputElement inputElement) { inputElement.setCtrlKeys("#up#down"); registerNavigableElement(inputElement); inputElement.addEventListener("onCtrlKey", new EventListener() { private Treerow treerow = getCurrentTreeRow(); @Override public void onEvent(Event event) throws Exception { Navigation navigation = Navigation .getIntentFrom((KeyEvent) event); moveFocusTo(inputElement, navigation, treerow); } }); } private void registerNavigableElement(InputElement inputElement) { Treerow treeRow = getCurrentTreeRow(); if (!navigableElementsByRow.containsKey(treeRow)) { navigableElementsByRow.put(treeRow, new ArrayList<InputElement>()); } navigableElementsByRow.get(treeRow).add(inputElement); } private void moveFocusTo(InputElement inputElement, Navigation navigation, Treerow treerow) { List<InputElement> boxes = getNavigableElements(treerow); int position = boxes.indexOf(inputElement); switch (navigation) { case UP: focusGoUp(treerow, position); break; case DOWN: focusGoDown(treerow, position); break; case LEFT: if (position == 0) { focusGoUp(treerow, boxes.size() - 1); } else { if (boxes.get(position - 1).isDisabled()) { moveFocusTo(boxes.get(position - 1), Navigation.LEFT, treerow); } else { boxes.get(position - 1).focus(); } } break; case RIGHT: if (position == 
boxes.size() - 1) { focusGoDown(treerow, 0); } else { if (boxes.get(position + 1).isDisabled()) { moveFocusTo(boxes.get(position + 1), Navigation.RIGHT, treerow); } else { boxes.get(position + 1).focus(); } } break; } } private void focusGoUp(Treerow treerow, int position) { Treeitem parent = (Treeitem) treerow.getParent(); List treeItems = parent.getParent().getChildren(); int myPosition = parent.indexOf(); if (myPosition > 0) { // the current node is not the first brother Treechildren treechildren = ((Treeitem) treeItems .get(myPosition - 1)).getTreechildren(); if (treechildren == null || treechildren.getChildren().size() == 0) { // the previous brother doesn't have children, // or it has children but they are unloaded Treerow upTreerow = ((Treeitem) treeItems .get(myPosition - 1)).getTreerow(); focusCorrectBox(upTreerow, position, Navigation.LEFT); } else { // we have to move to the last child of the previous // brother Treerow upTreerow = findLastTreerow((Treeitem) treeItems .get(myPosition - 1)); while (!upTreerow.isVisible()) { upTreerow = (Treerow) ((Treeitem) upTreerow .getParent().getParent().getParent()) .getTreerow(); } focusCorrectBox(upTreerow, position, Navigation.LEFT); } } else { // the node is the first brother if (parent.getParent().getParent() instanceof Treeitem) { // the node has a parent, so we move up to it Treerow upTreerow = ((Treeitem) parent.getParent() .getParent()).getTreerow(); focusCorrectBox(upTreerow, position, Navigation.LEFT); } } } private Treerow findLastTreerow(Treeitem item) { if (item.getTreechildren() == null) { return item.getTreerow(); } List children = item.getTreechildren().getChildren(); Treeitem lastchild = (Treeitem) children .get(children.size() - 1); return findLastTreerow(lastchild); } private void focusGoDown(Treerow treerow, int position) { Treeitem parent = (Treeitem) treerow.getParent(); focusGoDown(parent, position, false); } private void focusGoDown(Treeitem parent, int position, boolean skipChildren) { if (parent.getTreechildren() == null || skipChildren) { // Moving from a node to its brother List treeItems = parent.getParent().getChildren(); int myPosition = parent.indexOf(); if (myPosition < treeItems.size() - 1) { // the current node is not the last one Treerow downTreerow = ((Treeitem) treeItems .get(myPosition + 1)).getTreerow(); focusCorrectBox(downTreerow, position, Navigation.RIGHT); } else { // the node is the last brother if (parent.getParent().getParent() instanceof Treeitem) { focusGoDown((Treeitem) parent.getParent() .getParent(), position, true); } } } else { // Moving from a parent node to its children Treechildren treechildren = parent.getTreechildren(); if (treechildren.getChildren().size() == 0) { // the children are unloaded yet focusGoDown(parent, position, true); return; } Treerow downTreerow = ((Treeitem) treechildren .getChildren().get(0)).getTreerow(); if (!downTreerow.isVisible()) { // children are loaded but not visible focusGoDown(parent, position, true); return; } focusCorrectBox(downTreerow, position, Navigation.RIGHT); } } private void focusCorrectBox(Treerow treerow, int position, Navigation whereIfDisabled) { List<InputElement> boxes = getNavigableElements(treerow); if (boxes.get(position).isDisabled()) { moveFocusTo(boxes.get(position), whereIfDisabled, treerow); } else { boxes.get(position).focus(); } } private List<InputElement> getNavigableElements(Treerow row) { if (!navigableElementsByRow.containsKey(row)) { return Collections.emptyList(); } return 
Collections.unmodifiableList(navigableElementsByRow .get(row)); } } private Map<OrderElement, Intbox> hoursIntBoxByOrderElement = new HashMap<OrderElement, Intbox>(); private KeyboardNavigationHandler navigationHandler = new KeyboardNavigationHandler(); public OrderElementTreeitemRenderer() { } @Override protected void addDescriptionCell(OrderElement element) { addTaskNameCell(element); } private void addTaskNameCell(final OrderElement orderElementForThisRow) { int[] path = getModel().getPath(orderElementForThisRow); String cssClass = "depth_" + path.length; Textbox textBox = Util.bind(new Textbox(), new Util.Getter<String>() { @Override public String get() { return orderElementForThisRow.getName(); } }, new Util.Setter<String>() { @Override public void set(String value) { orderElementForThisRow.setName(value); } }); if (readOnly) { textBox.setDisabled(true); } addCell(cssClass, textBox); navigationHandler.register(textBox); } @Override protected SchedulingState getSchedulingStateFrom( OrderElement currentElement) { return currentElement.getSchedulingState(); } @Override protected void onDoubleClickForSchedulingStateCell( final OrderElement currentOrderElement) { IOrderElementModel model = orderModel .getOrderElementModel(currentOrderElement); orderElementController.openWindow(model); updateOrderElementHours(currentOrderElement); } protected void addCodeCell(final OrderElement orderElement) { Textbox textBoxCode = new Textbox(); Util.bind(textBoxCode, new Util.Getter<String>() { @Override public String get() { return orderElement.getCode(); } }, new Util.Setter<String>() { @Override public void set(String value) { orderElement.setCode(value); } }); textBoxCode.setConstraint(new Constraint() { @Override public void validate(Component comp, Object value) throws WrongValueException { if (!orderElement.isFormatCodeValid((String) value)) { throw new WrongValueException( comp, _("Value is not valid.\n Code cannot contain chars like '_' \n and should not be empty")); } } }); if (orderModel.isCodeAutogenerated() || readOnly) { textBoxCode.setDisabled(true); } addCell(textBoxCode); navigationHandler.register(textBoxCode); orderElementCodeTextboxes.put(orderElement, textBoxCode); } void addInitDateCell(final OrderElement currentOrderElement) { DynamicDatebox dinamicDatebox = new DynamicDatebox( currentOrderElement, new DynamicDatebox.Getter<Date>() { @Override public Date get() { return currentOrderElement.getInitDate(); } }, new DynamicDatebox.Setter<Date>() { @Override public void set(Date value) { currentOrderElement.setInitDate(value); } }); if (readOnly) { dinamicDatebox.setDisabled(true); } addDateCell(dinamicDatebox, _("init"), currentOrderElement); navigationHandler.register(dinamicDatebox .getDateTextBox()); } void addEndDateCell(final OrderElement currentOrderElement) { DynamicDatebox dinamicDatebox = new DynamicDatebox( currentOrderElement, new DynamicDatebox.Getter<Date>() { @Override public Date get() { return currentOrderElement.getDeadline(); } }, new DynamicDatebox.Setter<Date>() { @Override public void set(Date value) { currentOrderElement.setDeadline(value); } }); if (readOnly) { dinamicDatebox.setDisabled(true); } addDateCell(dinamicDatebox, _("end"), currentOrderElement); navigationHandler.register(dinamicDatebox .getDateTextBox()); } void addHoursCell(final OrderElement currentOrderElement) { Intbox intboxHours = buildHoursIntboxFor(currentOrderElement); hoursIntBoxByOrderElement.put(currentOrderElement, intboxHours); if (readOnly) { intboxHours.setDisabled(true); } Treecell 
cellHours = addCell(intboxHours); setReadOnlyHoursCell(currentOrderElement, intboxHours, cellHours); navigationHandler.register(intboxHours); } private void addDateCell(final DynamicDatebox dinamicDatebox, final String dateboxName, final OrderElement currentOrderElement) { Component cell = Executions.getCurrent().createComponents( "/common/components/dynamicDatebox.zul", null, null); try { dinamicDatebox.doAfterCompose(cell); } catch (Exception e) { notifyDateboxCantBeCreated(dateboxName, currentOrderElement .getCode()); } registerFocusEvent(dinamicDatebox.getDateTextBox()); addCell(cell); } private Intbox buildHoursIntboxFor( final OrderElement currentOrderElement) { Intbox result = new Intbox(); if (currentOrderElement instanceof OrderLine) { OrderLine orderLine = (OrderLine) currentOrderElement; Util.bind(result, getHoursGetterFor(currentOrderElement), getHoursSetterFor(orderLine)); result.setConstraint(getHoursConstraintFor(orderLine)); } else { // If it's a container hours cell is not editable Util.bind(result, getHoursGetterFor(currentOrderElement)); } return result; } private Getter<Integer> getHoursGetterFor( final OrderElement currentOrderElement) { return new Util.Getter<Integer>() { @Override public Integer get() { return currentOrderElement.getWorkHours(); } }; } private Constraint getHoursConstraintFor(final OrderLine orderLine) { return new Constraint() { @Override public void validate(Component comp, Object value) throws WrongValueException { if (!orderLine.isTotalHoursValid((Integer) value)) { throw new WrongValueException( comp, _("Value is not valid, taking into account the current list of HoursGroup")); } } }; } private Setter<Integer> getHoursSetterFor(final OrderLine orderLine) { return new Util.Setter<Integer>() { @Override public void set(Integer value) { orderLine.setWorkHours(value); List<OrderElement> parentNodes = getModel().getParents( orderLine); // Remove the last element because it's an // Order node, not an OrderElement parentNodes.remove(parentNodes.size() - 1); for (OrderElement node : parentNodes) { Intbox intbox = hoursIntBoxByOrderElement.get(node); intbox.setValue(node.getWorkHours()); } } }; } @Override protected void addOperationsCell(final Treeitem item, final OrderElement currentOrderElement) { addCell(createEditButton(currentOrderElement, item), createRemoveButton(currentOrderElement)); } private Button createEditButton(final OrderElement currentOrderElement, final Treeitem item) { Button editbutton = createButton("/common/img/ico_editar1.png", _("Edit"), "/common/img/ico_editar.png", "icono", new EventListener() { @Override public void onEvent(Event event) throws Exception { showEditionOrderElement(item); } }); return editbutton; } } @Override protected boolean isPredicateApplied() { return predicate != null; } /** * Apply filter to order elements in current order */ public void onApplyFilter() { OrderElementPredicate predicate = createPredicate(); this.predicate = predicate; if (predicate != null) { filterByPredicate(predicate); } else { showAllOrderElements(); } } private OrderElementPredicate createPredicate() { List<FilterPair> listFilters = (List<FilterPair>) bdFiltersOrderElement .getSelectedElements(); Date startDate = filterStartDateOrderElement.getValue(); Date finishDate = filterFinishDateOrderElement.getValue(); String name = filterNameOrderElement.getValue(); if (listFilters.isEmpty() && startDate == null && finishDate == null && name == null) { return null; } return new OrderElementPredicate(listFilters, startDate, finishDate, name); 
} private void filterByPredicate(OrderElementPredicate predicate) { OrderElementTreeModel orderElementTreeModel = orderModel .getOrderElementsFilteredByPredicate(predicate); tree.setModel(orderElementTreeModel.asTree()); tree.invalidate(); } public void showAllOrderElements() { this.predicate = null; tree.setModel(orderModel.getOrderElementTreeModel().asTree()); tree.invalidate(); } @Override protected boolean isNewButtonDisabled() { if(readOnly) { return true; } return isPredicateApplied(); } /** * Clear {@link BandboxSearch} for Labels, and initializes * {@link IPredicate} */ public void clear() { selectDefaultTab(); bdFiltersOrderElement.clear(); predicate = null; } Tab tabGeneralData; private TemplateFinderPopup templateFinderPopup; private void selectDefaultTab() { tabGeneralData.setSelected(true); } @Override protected String createTooltipText(OrderElement elem) { StringBuilder tooltipText = new StringBuilder(); tooltipText.append(elem.getName() + ". "); if ((elem.getDescription() != null) && (!elem.getDescription().equals(""))) { tooltipText.append(elem.getDescription()); tooltipText.append(". "); } if ((elem.getLabels() != null) && (!elem.getLabels().isEmpty())) { tooltipText.append(" " + _("Labels") + ":"); tooltipText.append(StringUtils.join(elem.getLabels(), ",")); tooltipText.append("."); } if ((elem.getCriterionRequirements() != null) && (!elem.getCriterionRequirements().isEmpty())) { ArrayList<String> criterionNames = new ArrayList<String>(); for(CriterionRequirement each:elem.getCriterionRequirements()) { if (each.isValid()) { criterionNames.add(each.getCriterion().getName()); } } if (!criterionNames.isEmpty()) { tooltipText.append(" " + _("Criteria") + ":"); tooltipText.append(StringUtils.join(criterionNames, ",")); tooltipText.append("."); } } // To calculate other unit advances implement // getOtherAdvancesPercentage() tooltipText.append(" " + _("Advance") + ":" + elem.getAdvancePercentage()); tooltipText.append("."); // tooltipText.append(elem.getAdvancePercentage()); return tooltipText.toString(); } public void showEditionOrderElement(final Treeitem item) { OrderElement currentOrderElement = (OrderElement) item.getValue(); markModifiedTreeitem(item.getTreerow()); IOrderElementModel model = orderModel .getOrderElementModel(currentOrderElement); orderElementController.openWindow(model); updateOrderElementHours(currentOrderElement); } private void updateOrderElementHours(OrderElement orderElement) { if ((!readOnly) && (orderElement instanceof OrderLine)) { Intbox boxHours = (Intbox) getRenderer().hoursIntBoxByOrderElement .get(orderElement); boxHours.setValue(orderElement.getWorkHours()); Treecell tc = (Treecell) boxHours.getParent(); setReadOnlyHoursCell(orderElement, boxHours, tc); boxHours.invalidate(); } } private void setReadOnlyHoursCell(OrderElement orderElement, Intbox boxHours, Treecell tc) { if ((!readOnly) && (orderElement instanceof OrderLine)) { if (orderElement.getHoursGroups().size() > 1) { boxHours.setReadonly(true); tc .setTooltiptext(_("Not editable for containing more that an hours group.")); } else { boxHours.setReadonly(false); tc.setTooltiptext(""); } } } public Treeitem getTreeitemByOrderElement(OrderElement element) { List<Treeitem> listItems = new ArrayList<Treeitem>(this.tree.getItems()); for (Treeitem item : listItems) { OrderElement orderElement = (OrderElement) item.getValue(); if (orderElement.getId().equals(element.getId())) { return item; } } return null; } /** * Operations to filter the orders by multiple filters */ public Constraint 
checkConstraintFinishDate() { return new Constraint() { @Override public void validate(Component comp, Object value) throws WrongValueException { Date finishDate = (Date) value; if ((finishDate != null) && (filterStartDateOrderElement.getValue() != null) && (finishDate.compareTo(filterStartDateOrderElement .getValue()) < 0)) { filterFinishDateOrderElement.setValue(null); throw new WrongValueException(comp, _("must be greater than start date")); } } }; } public Constraint checkConstraintStartDate() { return new Constraint() { @Override public void validate(Component comp, Object value) throws WrongValueException { Date startDate = (Date) value; if ((startDate != null) && (filterFinishDateOrderElement.getValue() != null) && (startDate.compareTo(filterFinishDateOrderElement .getValue()) > 0)) { filterStartDateOrderElement.setValue(null); throw new WrongValueException(comp, _("must be lower than finish date")); } } }; } @Override protected void remove(OrderElement element) { boolean alreadyInUse = orderModel.isAlreadyInUse(element); if (alreadyInUse) { messagesForUser .showMessage( Level.ERROR, _( "You can not remove the task \"{0}\" because of this or any of its children are already in use in some work reports", element.getName())); } else { super.remove(element); orderElementCodeTextboxes.remove(element); } } @Override protected void refreshHoursBox(OrderElement node) { List<OrderElement> parentNodes = getModel().getParents(node); // Remove the last element because it's an // Order node, not an OrderElement parentNodes.remove(parentNodes.size() - 1); for (OrderElement parent : parentNodes) { getRenderer().hoursIntBoxByOrderElement.get(parent) .setValue(parent.getWorkHours()); } } }
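One detail worth calling out in the KeyboardNavigationHandler of the file above: Navigation.getIntentFrom() works because the DOM key codes for the arrow keys (37=Left, 38=Up, 39=Right, 40=Down) line up with the enum declaration order LEFT, UP, RIGHT, DOWN. A standalone sketch of that mapping, using a plain int key code instead of ZK's KeyEvent purely for illustration:

// Sketch of the arrow-key mapping used by Navigation.getIntentFrom() above.
public final class NavigationKeyDemo {
    enum Navigation { LEFT, UP, RIGHT, DOWN } // must stay in key-code order 37..40

    static Navigation fromKeyCode(int keyCode) {
        return Navigation.values()[keyCode - 37]; // 37=Left, 38=Up, 39=Right, 40=Down
    }

    public static void main(String[] args) {
        System.out.println(fromKeyCode(38)); // prints UP
    }
}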
navalplanner-webapp/src/main/java/org/navalplanner/web/orders/OrderElementTreeController.java
/* * This file is part of NavalPlan * * Copyright (C) 2009-2010 Fundación para o Fomento da Calidade Industrial e * Desenvolvemento Tecnolóxico de Galicia * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package org.navalplanner.web.orders; import static org.navalplanner.web.I18nHelper._; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.Date; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import javax.annotation.Resource; import org.apache.commons.lang.StringUtils; import org.navalplanner.business.orders.entities.Order; import org.navalplanner.business.orders.entities.OrderElement; import org.navalplanner.business.orders.entities.OrderLine; import org.navalplanner.business.orders.entities.OrderLineGroup; import org.navalplanner.business.orders.entities.SchedulingState; import org.navalplanner.business.requirements.entities.CriterionRequirement; import org.navalplanner.business.templates.entities.OrderElementTemplate; import org.navalplanner.web.common.IMessagesForUser; import org.navalplanner.web.common.Level; import org.navalplanner.web.common.Util; import org.navalplanner.web.common.Util.Getter; import org.navalplanner.web.common.Util.Setter; import org.navalplanner.web.common.components.bandboxsearch.BandboxMultipleSearch; import org.navalplanner.web.common.components.bandboxsearch.BandboxSearch; import org.navalplanner.web.common.components.finders.FilterPair; import org.navalplanner.web.orders.assigntemplates.TemplateFinderPopup; import org.navalplanner.web.orders.assigntemplates.TemplateFinderPopup.IOnResult; import org.navalplanner.web.templates.IOrderTemplatesControllerEntryPoints; import org.navalplanner.web.tree.TreeController; import org.zkoss.ganttz.IPredicate; import org.zkoss.ganttz.util.ComponentsFinder; import org.zkoss.zk.ui.Component; import org.zkoss.zk.ui.Executions; import org.zkoss.zk.ui.WrongValueException; import org.zkoss.zk.ui.event.Event; import org.zkoss.zk.ui.event.EventListener; import org.zkoss.zk.ui.event.KeyEvent; import org.zkoss.zul.Button; import org.zkoss.zul.Constraint; import org.zkoss.zul.Datebox; import org.zkoss.zul.Intbox; import org.zkoss.zul.Messagebox; import org.zkoss.zul.Tab; import org.zkoss.zul.Textbox; import org.zkoss.zul.Treechildren; import org.zkoss.zul.Treeitem; import org.zkoss.zul.Vbox; import org.zkoss.zul.api.Treecell; import org.zkoss.zul.api.Treerow; import org.zkoss.zul.impl.api.InputElement; /** * Controller for {@link OrderElement} tree view of {@link Order} entities <br /> * @author Lorenzo Tilve Álvaro <[email protected]> * @author Manuel Rego Casasnovas <[email protected]> * @author Susana Montes Pedreira <[email protected]> */ public class OrderElementTreeController extends TreeController<OrderElement> { private Vbox orderElementFilter; private BandboxMultipleSearch bdFiltersOrderElement; private 
Datebox filterStartDateOrderElement; private Datebox filterFinishDateOrderElement; private Textbox filterNameOrderElement; private OrderElementTreeitemRenderer renderer = new OrderElementTreeitemRenderer(); private final IOrderModel orderModel; private final OrderElementController orderElementController; private transient IPredicate predicate; @Resource private IOrderTemplatesControllerEntryPoints orderTemplates; private final IMessagesForUser messagesForUser; public List<org.navalplanner.business.labels.entities.Label> getLabels() { return orderModel.getLabels(); } @Override public OrderElementTreeitemRenderer getRenderer() { return renderer; } public OrderElementTreeController(IOrderModel orderModel, OrderElementController orderElementController, IMessagesForUser messagesForUser) { super(OrderElement.class); this.orderModel = orderModel; this.orderElementController = orderElementController; this.messagesForUser = messagesForUser; } public OrderElementController getOrderElementController() { return orderElementController; } @Override protected OrderElementTreeModel getModel() { return orderModel.getOrderElementTreeModel(); } public void createTemplateFromSelectedOrderElement() { if (tree.getSelectedCount() == 1) { createTemplate(getSelectedNode()); } else { showSelectAnElementMessageBox(); } } public void editSelectedOrderElement() { if (tree.getSelectedCount() == 1) { showEditionOrderElement(tree.getSelectedItem()); } else { showSelectAnElementMessageBox(); } } public void moveSelectedOrderElementUp() { if (tree.getSelectedCount() == 1) { Treeitem item = tree.getSelectedItem(); up((OrderElement)item.getValue()); Treeitem brother = (Treeitem) item.getPreviousSibling(); if (brother != null) { brother.setSelected(true); } } else { showSelectAnElementMessageBox(); } } public void moveSelectedOrderElementDown() { if (tree.getSelectedCount() == 1) { Treeitem item = tree.getSelectedItem(); down((OrderElement)item.getValue()); Treeitem brother = (Treeitem) item.getNextSibling(); if (brother != null) { brother.setSelected(true); } } else { showSelectAnElementMessageBox(); } } public void indentSelectedOrderElement() { if (tree.getSelectedCount() == 1) { indent(getSelectedNode()); } else { showSelectAnElementMessageBox(); } } public void unindentSelectedOrderElement() { if (tree.getSelectedCount() == 1) { unindent(getSelectedNode()); } else { showSelectAnElementMessageBox(); } } public void deleteSelectedOrderElement() { if (tree.getSelectedCount() == 1) { remove(getSelectedNode()); } else { showSelectAnElementMessageBox(); } } private void showSelectAnElementMessageBox() { try { Messagebox.show(_("Choose a task " + "to operate on it")); } catch (InterruptedException e) { throw new RuntimeException(e); } } private boolean isTemplateCreationConfirmed() { try { int status = Messagebox .show( _("Still not saved changes would be lost." 
+ " Are you sure you want to go to create a template?"), "Confirm", Messagebox.YES | Messagebox.NO, Messagebox.QUESTION); return Messagebox.YES == status; } catch (InterruptedException e) { throw new RuntimeException(e); } } public void createFromTemplate() { templateFinderPopup.openForSubElemenetCreation(tree, "after_pointer", new IOnResult<OrderElementTemplate>() { @Override public void found(OrderElementTemplate template) { OrderLineGroup parent = (OrderLineGroup) getModel() .getRoot(); orderModel.createFrom(parent, template); getModel().addNewlyAddedChildrenOf(parent); } }); } private void createTemplate(OrderElement selectedNode) { if (!isTemplateCreationConfirmed()) { return; } if (!selectedNode.isNewObject()) { orderTemplates.goToCreateTemplateFrom(selectedNode); } else { notifyTemplateCantBeCreated(); } } private void notifyTemplateCantBeCreated() { try { Messagebox .show( _("Templates can only be created from already existent tasks.\n" + "Newly tasks cannot be used."), _("Operation cannot be done"), Messagebox.OK, Messagebox.INFORMATION); } catch (InterruptedException e) { throw new RuntimeException(e); } } private void notifyDateboxCantBeCreated(final String dateboxName, final String codeOrderElement) { try { Messagebox.show(_("the " + dateboxName + "datebox of the task " + codeOrderElement + " could not be created.\n"), _("Operation cannot be done"), Messagebox.OK, Messagebox.INFORMATION); } catch (InterruptedException e) { } } protected void filterByPredicateIfAny() { if (predicate != null) { filterByPredicate(); } } private void filterByPredicate() { OrderElementTreeModel orderElementTreeModel = orderModel .getOrderElementsFilteredByPredicate(predicate); tree.setModel(orderElementTreeModel.asTree()); tree.invalidate(); } void doEditFor(Order order) { Util.reloadBindings(tree); } public void disabledCodeBoxes(boolean disabled) { Set<Treeitem> childrenSet = new HashSet<Treeitem>(); Treechildren treeChildren = tree.getTreechildren(); if (treeChildren != null) { childrenSet.addAll((Collection<Treeitem>) treeChildren.getItems()); } for (Treeitem each : childrenSet) { disableCodeBoxes(each, disabled); } } private void disableCodeBoxes(Treeitem item, boolean disabled) { Treerow row = item.getTreerow(); InputElement codeBox = (InputElement) ((Treecell) row.getChildren() .get(1)).getChildren().get(0); codeBox.setDisabled(disabled); codeBox.invalidate(); Set<Treeitem> childrenSet = new HashSet<Treeitem>(); Treechildren children = item.getTreechildren(); if (children != null) { childrenSet.addAll((Collection<Treeitem>) children.getItems()); } for (Treeitem each : childrenSet) { disableCodeBoxes(each, disabled); } } @Override public void doAfterCompose(Component comp) throws Exception { super.doAfterCompose(comp); orderElementFilter.getChildren().clear(); appendExpandCollapseButton(); // Configuration of the order elements filter Component filterComponent = Executions.createComponents( "/orders/_orderElementTreeFilter.zul", orderElementFilter, new HashMap<String, String>()); filterComponent.setVariable("treeController", this, true); bdFiltersOrderElement = (BandboxMultipleSearch) filterComponent .getFellow("bdFiltersOrderElement"); filterStartDateOrderElement = (Datebox) filterComponent .getFellow("filterStartDateOrderElement"); filterFinishDateOrderElement = (Datebox) filterComponent .getFellow("filterFinishDateOrderElement"); filterNameOrderElement = (Textbox) filterComponent .getFellow("filterNameOrderElement"); templateFinderPopup = (TemplateFinderPopup) comp 
.getFellow("templateFinderPopupAtTree"); } private void appendExpandCollapseButton() { List<Component> children = orderElementFilter.getParent().getChildren(); // Is already added? Button button = (Button) ComponentsFinder.findById("expandAllButton", children); if (button != null) { return; } // Append expand/collapse button final Button expandAllButton = new Button(); expandAllButton.setId("expandAllButton"); expandAllButton.setClass("planner-command"); expandAllButton.setTooltiptext(_("Expand/Collapse all")); expandAllButton.setImage("/common/img/ico_expand.png"); expandAllButton.addEventListener("onClick", new EventListener() { @Override public void onEvent(Event event) throws Exception { if (expandAllButton.getSclass().equals("planner-command")) { expandAll(); expandAllButton.setSclass("planner-command clicked"); } else { collapseAll(); expandAllButton.setSclass("planner-command"); } } }); children.add(expandAllButton); } public void expandAll() { Set<Treeitem> childrenSet = new HashSet<Treeitem>(); Treechildren children = tree.getTreechildren(); if(children != null) { childrenSet.addAll((Collection<Treeitem>) children.getItems()); } for(Treeitem each: childrenSet) { expandAll(each); } } private void expandAll(Treeitem item) { item.setOpen(true); Set<Treeitem> childrenSet = new HashSet<Treeitem>(); Treechildren children = item.getTreechildren(); if(children != null) { childrenSet.addAll((Collection<Treeitem>) children.getItems()); } for(Treeitem each: childrenSet) { expandAll(each); } } public void collapseAll() { Treechildren children = tree.getTreechildren(); for(Treeitem each: (Collection<Treeitem>) children.getItems()) { each.setOpen(false); } } private enum Navigation { LEFT, UP, RIGHT, DOWN; public static Navigation getIntentFrom(KeyEvent keyEvent) { return values()[keyEvent.getKeyCode() - 37]; } } private Map<OrderElement, Textbox> orderElementCodeTextboxes = new HashMap<OrderElement, Textbox>(); public Map<OrderElement, Textbox> getOrderElementCodeTextboxes() { return orderElementCodeTextboxes; } public class OrderElementTreeitemRenderer extends Renderer { private class KeyboardNavigationHandler { private Map<Treerow, List<InputElement>> navigableElementsByRow = new HashMap<Treerow, List<InputElement>>(); void registerKeyboardListener(final InputElement inputElement) { inputElement.setCtrlKeys("#up#down"); registerNavigableElement(inputElement); inputElement.addEventListener("onCtrlKey", new EventListener() { private Treerow treerow = getCurrentTreeRow(); @Override public void onEvent(Event event) throws Exception { Navigation navigation = Navigation .getIntentFrom((KeyEvent) event); moveFocusTo(inputElement, navigation, treerow); } }); } private void registerNavigableElement(InputElement inputElement) { Treerow treeRow = getCurrentTreeRow(); if (!navigableElementsByRow.containsKey(treeRow)) { navigableElementsByRow.put(treeRow, new ArrayList<InputElement>()); } navigableElementsByRow.get(treeRow).add(inputElement); } private void moveFocusTo(InputElement inputElement, Navigation navigation, Treerow treerow) { List<InputElement> boxes = getNavigableElements(treerow); int position = boxes.indexOf(inputElement); switch (navigation) { case UP: focusGoUp(treerow, position); break; case DOWN: focusGoDown(treerow, position); break; case LEFT: if (position == 0) { focusGoUp(treerow, boxes.size() - 1); } else { if (boxes.get(position - 1).isDisabled()) { moveFocusTo(boxes.get(position - 1), Navigation.LEFT, treerow); } else { boxes.get(position - 1).focus(); } } break; case RIGHT: if 
(position == boxes.size() - 1) { focusGoDown(treerow, 0); } else { if (boxes.get(position + 1).isDisabled()) { moveFocusTo(boxes.get(position + 1), Navigation.RIGHT, treerow); } else { boxes.get(position + 1).focus(); } } break; } } private void focusGoUp(Treerow treerow, int position) { Treeitem parent = (Treeitem) treerow.getParent(); List treeItems = parent.getParent().getChildren(); int myPosition = parent.indexOf(); if (myPosition > 0) { // the current node is not the first brother Treechildren treechildren = ((Treeitem) treeItems .get(myPosition - 1)).getTreechildren(); if (treechildren == null || treechildren.getChildren().size() == 0) { // the previous brother doesn't have children, // or it has children but they are unloaded Treerow upTreerow = ((Treeitem) treeItems .get(myPosition - 1)).getTreerow(); focusCorrectBox(upTreerow, position, Navigation.LEFT); } else { // we have to move to the last child of the previous // brother Treerow upTreerow = findLastTreerow((Treeitem) treeItems .get(myPosition - 1)); while (!upTreerow.isVisible()) { upTreerow = (Treerow) ((Treeitem) upTreerow .getParent().getParent().getParent()) .getTreerow(); } focusCorrectBox(upTreerow, position, Navigation.LEFT); } } else { // the node is the first brother if (parent.getParent().getParent() instanceof Treeitem) { // the node has a parent, so we move up to it Treerow upTreerow = ((Treeitem) parent.getParent() .getParent()).getTreerow(); focusCorrectBox(upTreerow, position, Navigation.LEFT); } } } private Treerow findLastTreerow(Treeitem item) { if (item.getTreechildren() == null) { return item.getTreerow(); } List children = item.getTreechildren().getChildren(); Treeitem lastchild = (Treeitem) children .get(children.size() - 1); return findLastTreerow(lastchild); } private void focusGoDown(Treerow treerow, int position) { Treeitem parent = (Treeitem) treerow.getParent(); focusGoDown(parent, position, false); } private void focusGoDown(Treeitem parent, int position, boolean skipChildren) { if (parent.getTreechildren() == null || skipChildren) { // Moving from a node to its brother List treeItems = parent.getParent().getChildren(); int myPosition = parent.indexOf(); if (myPosition < treeItems.size() - 1) { // the current node is not the last one Treerow downTreerow = ((Treeitem) treeItems .get(myPosition + 1)).getTreerow(); focusCorrectBox(downTreerow, position, Navigation.RIGHT); } else { // the node is the last brother if (parent.getParent().getParent() instanceof Treeitem) { focusGoDown((Treeitem) parent.getParent() .getParent(), position, true); } } } else { // Moving from a parent node to its children Treechildren treechildren = parent.getTreechildren(); if (treechildren.getChildren().size() == 0) { // the children are unloaded yet focusGoDown(parent, position, true); return; } Treerow downTreerow = ((Treeitem) treechildren .getChildren().get(0)).getTreerow(); if (!downTreerow.isVisible()) { // children are loaded but not visible focusGoDown(parent, position, true); return; } focusCorrectBox(downTreerow, position, Navigation.RIGHT); } } private void focusCorrectBox(Treerow treerow, int position, Navigation whereIfDisabled) { List<InputElement> boxes = getNavigableElements(treerow); if (boxes.get(position).isDisabled()) { moveFocusTo(boxes.get(position), whereIfDisabled, treerow); } else { boxes.get(position).focus(); } } private List<InputElement> getNavigableElements(Treerow row) { if (!navigableElementsByRow.containsKey(row)) { return Collections.emptyList(); } return 
Collections.unmodifiableList(navigableElementsByRow .get(row)); } } private Map<OrderElement, Intbox> hoursIntBoxByOrderElement = new HashMap<OrderElement, Intbox>(); private KeyboardNavigationHandler navigationHandler = new KeyboardNavigationHandler(); public OrderElementTreeitemRenderer() { } @Override protected void addDescriptionCell(OrderElement element) { addTaskNameCell(element); } private void addTaskNameCell(final OrderElement orderElementForThisRow) { int[] path = getModel().getPath(orderElementForThisRow); String cssClass = "depth_" + path.length; Textbox textBox = Util.bind(new Textbox(), new Util.Getter<String>() { @Override public String get() { return orderElementForThisRow.getName(); } }, new Util.Setter<String>() { @Override public void set(String value) { orderElementForThisRow.setName(value); } }); if (readOnly) { textBox.setDisabled(true); } addCell(cssClass, textBox); navigationHandler.registerKeyboardListener(textBox); } @Override protected SchedulingState getSchedulingStateFrom( OrderElement currentElement) { return currentElement.getSchedulingState(); } @Override protected void onDoubleClickForSchedulingStateCell( final OrderElement currentOrderElement) { IOrderElementModel model = orderModel .getOrderElementModel(currentOrderElement); orderElementController.openWindow(model); updateOrderElementHours(currentOrderElement); } protected void addCodeCell(final OrderElement orderElement) { Textbox textBoxCode = new Textbox(); Util.bind(textBoxCode, new Util.Getter<String>() { @Override public String get() { return orderElement.getCode(); } }, new Util.Setter<String>() { @Override public void set(String value) { orderElement.setCode(value); } }); textBoxCode.setConstraint(new Constraint() { @Override public void validate(Component comp, Object value) throws WrongValueException { if (!orderElement.isFormatCodeValid((String) value)) { throw new WrongValueException( comp, _("Value is not valid.\n Code cannot contain chars like '_' \n and should not be empty")); } } }); if (orderModel.isCodeAutogenerated() || readOnly) { textBoxCode.setDisabled(true); } addCell(textBoxCode); navigationHandler.registerKeyboardListener(textBoxCode); orderElementCodeTextboxes.put(orderElement, textBoxCode); } void addInitDateCell(final OrderElement currentOrderElement) { DynamicDatebox dinamicDatebox = new DynamicDatebox( currentOrderElement, new DynamicDatebox.Getter<Date>() { @Override public Date get() { return currentOrderElement.getInitDate(); } }, new DynamicDatebox.Setter<Date>() { @Override public void set(Date value) { currentOrderElement.setInitDate(value); } }); if (readOnly) { dinamicDatebox.setDisabled(true); } addDateCell(dinamicDatebox, _("init"), currentOrderElement); navigationHandler.registerKeyboardListener(dinamicDatebox .getDateTextBox()); } void addEndDateCell(final OrderElement currentOrderElement) { DynamicDatebox dinamicDatebox = new DynamicDatebox( currentOrderElement, new DynamicDatebox.Getter<Date>() { @Override public Date get() { return currentOrderElement.getDeadline(); } }, new DynamicDatebox.Setter<Date>() { @Override public void set(Date value) { currentOrderElement.setDeadline(value); } }); if (readOnly) { dinamicDatebox.setDisabled(true); } addDateCell(dinamicDatebox, _("end"), currentOrderElement); navigationHandler.registerKeyboardListener(dinamicDatebox .getDateTextBox()); } void addHoursCell(final OrderElement currentOrderElement) { Intbox intboxHours = buildHoursIntboxFor(currentOrderElement); hoursIntBoxByOrderElement.put(currentOrderElement, 
intboxHours); if (readOnly) { intboxHours.setDisabled(true); } Treecell cellHours = addCell(intboxHours); setReadOnlyHoursCell(currentOrderElement, intboxHours, cellHours); navigationHandler.registerKeyboardListener(intboxHours); } private void addDateCell(final DynamicDatebox dinamicDatebox, final String dateboxName, final OrderElement currentOrderElement) { Component cell = Executions.getCurrent().createComponents( "/common/components/dynamicDatebox.zul", null, null); try { dinamicDatebox.doAfterCompose(cell); } catch (Exception e) { notifyDateboxCantBeCreated(dateboxName, currentOrderElement .getCode()); } registerFocusEvent(dinamicDatebox.getDateTextBox()); addCell(cell); } private Intbox buildHoursIntboxFor( final OrderElement currentOrderElement) { Intbox result = new Intbox(); if (currentOrderElement instanceof OrderLine) { OrderLine orderLine = (OrderLine) currentOrderElement; Util.bind(result, getHoursGetterFor(currentOrderElement), getHoursSetterFor(orderLine)); result.setConstraint(getHoursConstraintFor(orderLine)); } else { // If it's a container hours cell is not editable Util.bind(result, getHoursGetterFor(currentOrderElement)); } return result; } private Getter<Integer> getHoursGetterFor( final OrderElement currentOrderElement) { return new Util.Getter<Integer>() { @Override public Integer get() { return currentOrderElement.getWorkHours(); } }; } private Constraint getHoursConstraintFor(final OrderLine orderLine) { return new Constraint() { @Override public void validate(Component comp, Object value) throws WrongValueException { if (!orderLine.isTotalHoursValid((Integer) value)) { throw new WrongValueException( comp, _("Value is not valid, taking into account the current list of HoursGroup")); } } }; } private Setter<Integer> getHoursSetterFor(final OrderLine orderLine) { return new Util.Setter<Integer>() { @Override public void set(Integer value) { orderLine.setWorkHours(value); List<OrderElement> parentNodes = getModel().getParents( orderLine); // Remove the last element because it's an // Order node, not an OrderElement parentNodes.remove(parentNodes.size() - 1); for (OrderElement node : parentNodes) { Intbox intbox = hoursIntBoxByOrderElement.get(node); intbox.setValue(node.getWorkHours()); } } }; } @Override protected void addOperationsCell(final Treeitem item, final OrderElement currentOrderElement) { addCell(createEditButton(currentOrderElement, item), createRemoveButton(currentOrderElement)); } private Button createEditButton(final OrderElement currentOrderElement, final Treeitem item) { Button editbutton = createButton("/common/img/ico_editar1.png", _("Edit"), "/common/img/ico_editar.png", "icono", new EventListener() { @Override public void onEvent(Event event) throws Exception { showEditionOrderElement(item); } }); return editbutton; } } @Override protected boolean isPredicateApplied() { return predicate != null; } /** * Apply filter to order elements in current order */ public void onApplyFilter() { OrderElementPredicate predicate = createPredicate(); this.predicate = predicate; if (predicate != null) { filterByPredicate(predicate); } else { showAllOrderElements(); } } private OrderElementPredicate createPredicate() { List<FilterPair> listFilters = (List<FilterPair>) bdFiltersOrderElement .getSelectedElements(); Date startDate = filterStartDateOrderElement.getValue(); Date finishDate = filterFinishDateOrderElement.getValue(); String name = filterNameOrderElement.getValue(); if (listFilters.isEmpty() && startDate == null && finishDate == null && name == null) { 
return null; } return new OrderElementPredicate(listFilters, startDate, finishDate, name); } private void filterByPredicate(OrderElementPredicate predicate) { OrderElementTreeModel orderElementTreeModel = orderModel .getOrderElementsFilteredByPredicate(predicate); tree.setModel(orderElementTreeModel.asTree()); tree.invalidate(); } public void showAllOrderElements() { this.predicate = null; tree.setModel(orderModel.getOrderElementTreeModel().asTree()); tree.invalidate(); } @Override protected boolean isNewButtonDisabled() { if(readOnly) { return true; } return isPredicateApplied(); } /** * Clear {@link BandboxSearch} for Labels, and initializes * {@link IPredicate} */ public void clear() { selectDefaultTab(); bdFiltersOrderElement.clear(); predicate = null; } Tab tabGeneralData; private TemplateFinderPopup templateFinderPopup; private void selectDefaultTab() { tabGeneralData.setSelected(true); } @Override protected String createTooltipText(OrderElement elem) { StringBuilder tooltipText = new StringBuilder(); tooltipText.append(elem.getName() + ". "); if ((elem.getDescription() != null) && (!elem.getDescription().equals(""))) { tooltipText.append(elem.getDescription()); tooltipText.append(". "); } if ((elem.getLabels() != null) && (!elem.getLabels().isEmpty())) { tooltipText.append(" " + _("Labels") + ":"); tooltipText.append(StringUtils.join(elem.getLabels(), ",")); tooltipText.append("."); } if ((elem.getCriterionRequirements() != null) && (!elem.getCriterionRequirements().isEmpty())) { ArrayList<String> criterionNames = new ArrayList<String>(); for(CriterionRequirement each:elem.getCriterionRequirements()) { if (each.isValid()) { criterionNames.add(each.getCriterion().getName()); } } if (!criterionNames.isEmpty()) { tooltipText.append(" " + _("Criteria") + ":"); tooltipText.append(StringUtils.join(criterionNames, ",")); tooltipText.append("."); } } // To calculate other unit advances implement // getOtherAdvancesPercentage() tooltipText.append(" " + _("Advance") + ":" + elem.getAdvancePercentage()); tooltipText.append("."); // tooltipText.append(elem.getAdvancePercentage()); return tooltipText.toString(); } public void showEditionOrderElement(final Treeitem item) { OrderElement currentOrderElement = (OrderElement) item.getValue(); markModifiedTreeitem(item.getTreerow()); IOrderElementModel model = orderModel .getOrderElementModel(currentOrderElement); orderElementController.openWindow(model); updateOrderElementHours(currentOrderElement); } private void updateOrderElementHours(OrderElement orderElement) { if ((!readOnly) && (orderElement instanceof OrderLine)) { Intbox boxHours = (Intbox) getRenderer().hoursIntBoxByOrderElement .get(orderElement); boxHours.setValue(orderElement.getWorkHours()); Treecell tc = (Treecell) boxHours.getParent(); setReadOnlyHoursCell(orderElement, boxHours, tc); boxHours.invalidate(); } } private void setReadOnlyHoursCell(OrderElement orderElement, Intbox boxHours, Treecell tc) { if ((!readOnly) && (orderElement instanceof OrderLine)) { if (orderElement.getHoursGroups().size() > 1) { boxHours.setReadonly(true); tc .setTooltiptext(_("Not editable for containing more that an hours group.")); } else { boxHours.setReadonly(false); tc.setTooltiptext(""); } } } public Treeitem getTreeitemByOrderElement(OrderElement element) { List<Treeitem> listItems = new ArrayList<Treeitem>(this.tree.getItems()); for (Treeitem item : listItems) { OrderElement orderElement = (OrderElement) item.getValue(); if (orderElement.getId().equals(element.getId())) { return item; } } return 
null; } /** * Operations to filter the orders by multiple filters */ public Constraint checkConstraintFinishDate() { return new Constraint() { @Override public void validate(Component comp, Object value) throws WrongValueException { Date finishDate = (Date) value; if ((finishDate != null) && (filterStartDateOrderElement.getValue() != null) && (finishDate.compareTo(filterStartDateOrderElement .getValue()) < 0)) { filterFinishDateOrderElement.setValue(null); throw new WrongValueException(comp, _("must be greater than start date")); } } }; } public Constraint checkConstraintStartDate() { return new Constraint() { @Override public void validate(Component comp, Object value) throws WrongValueException { Date startDate = (Date) value; if ((startDate != null) && (filterFinishDateOrderElement.getValue() != null) && (startDate.compareTo(filterFinishDateOrderElement .getValue()) > 0)) { filterStartDateOrderElement.setValue(null); throw new WrongValueException(comp, _("must be lower than finish date")); } } }; } @Override protected void remove(OrderElement element) { boolean alreadyInUse = orderModel.isAlreadyInUse(element); if (alreadyInUse) { messagesForUser .showMessage( Level.ERROR, _( "You can not remove the task \"{0}\" because of this or any of its children are already in use in some work reports", element.getName())); } else { super.remove(element); orderElementCodeTextboxes.remove(element); } } @Override protected void refreshHoursBox(OrderElement node) { List<OrderElement> parentNodes = getModel().getParents(node); // Remove the last element because it's an // Order node, not an OrderElement parentNodes.remove(parentNodes.size() - 1); for (OrderElement parent : parentNodes) { getRenderer().hoursIntBoxByOrderElement.get(parent) .setValue(parent.getWorkHours()); } } }
Rename method. Strip the name of implementation details. FEA: ItEr67S04BugFixing
navalplanner-webapp/src/main/java/org/navalplanner/web/orders/OrderElementTreeController.java
Rename method
<ide><path>avalplanner-webapp/src/main/java/org/navalplanner/web/orders/OrderElementTreeController.java <ide> <ide> private Map<Treerow, List<InputElement>> navigableElementsByRow = new HashMap<Treerow, List<InputElement>>(); <ide> <del> void registerKeyboardListener(final InputElement inputElement) { <add> void register(final InputElement inputElement) { <ide> inputElement.setCtrlKeys("#up#down"); <ide> registerNavigableElement(inputElement); <ide> inputElement.addEventListener("onCtrlKey", new EventListener() { <ide> textBox.setDisabled(true); <ide> } <ide> addCell(cssClass, textBox); <del> navigationHandler.registerKeyboardListener(textBox); <add> navigationHandler.register(textBox); <ide> } <ide> <ide> @Override <ide> } <ide> <ide> addCell(textBoxCode); <del> navigationHandler.registerKeyboardListener(textBoxCode); <add> navigationHandler.register(textBoxCode); <ide> orderElementCodeTextboxes.put(orderElement, textBoxCode); <ide> } <ide> <ide> dinamicDatebox.setDisabled(true); <ide> } <ide> addDateCell(dinamicDatebox, _("init"), currentOrderElement); <del> navigationHandler.registerKeyboardListener(dinamicDatebox <add> navigationHandler.register(dinamicDatebox <ide> .getDateTextBox()); <ide> } <ide> <ide> dinamicDatebox.setDisabled(true); <ide> } <ide> addDateCell(dinamicDatebox, _("end"), currentOrderElement); <del> navigationHandler.registerKeyboardListener(dinamicDatebox <add> navigationHandler.register(dinamicDatebox <ide> .getDateTextBox()); <ide> } <ide> <ide> <ide> Treecell cellHours = addCell(intboxHours); <ide> setReadOnlyHoursCell(currentOrderElement, intboxHours, cellHours); <del> navigationHandler.registerKeyboardListener(intboxHours); <add> navigationHandler.register(intboxHours); <ide> } <ide> <ide> private void addDateCell(final DynamicDatebox dinamicDatebox,
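The diff above renames registerKeyboardListener to register without changing behaviour: every editable tree cell still registers its InputElement for Ctrl+Up / Ctrl+Down handling. Below is an illustrative sketch of that registration pattern only; it assumes ZK's InputElement, EventListener and KeyEvent types, and the findRow/positionInRow helpers are hypothetical stand-ins for the navigableElementsByRow bookkeeping, since the real handler body is not part of this excerpt.

// Illustrative sketch only: mirrors the register(...) pattern from the diff above.
// findRow/positionInRow are hypothetical helpers; the project's actual handler
// body is not shown in this record.
import org.zkoss.zk.ui.event.Event;
import org.zkoss.zk.ui.event.EventListener;
import org.zkoss.zk.ui.event.KeyEvent;
import org.zkoss.zul.Treerow;
import org.zkoss.zul.impl.InputElement;

abstract class KeyboardNavigationSketch {

    void register(final InputElement inputElement) {
        // Ask the client side to forward Ctrl+Up / Ctrl+Down ("#up#down") to the server.
        inputElement.setCtrlKeys("#up#down");
        inputElement.addEventListener("onCtrlKey", new EventListener() {
            public void onEvent(Event event) throws Exception {
                int keyCode = ((KeyEvent) event).getKeyCode();
                Treerow row = findRow(inputElement);
                int position = positionInRow(inputElement, row);
                if (keyCode == 38) {        // up arrow
                    focusGoUp(row, position);
                } else if (keyCode == 40) { // down arrow
                    focusGoDown(row, position);
                }
            }
        });
    }

    // Hypothetical lookups standing in for the navigableElementsByRow map.
    abstract Treerow findRow(InputElement element);
    abstract int positionInRow(InputElement element, Treerow row);

    // Provided by the surrounding controller in the excerpt above.
    abstract void focusGoUp(Treerow row, int position);
    abstract void focusGoDown(Treerow row, int position);
}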
Java
apache-2.0
0e5c3227a72dbf6ce282ed59b46a9d43fa7d1cde
0
ernestp/consulo,vvv1559/intellij-community,caot/intellij-community,diorcety/intellij-community,idea4bsd/idea4bsd,ivan-fedorov/intellij-community,FHannes/intellij-community,dslomov/intellij-community,Lekanich/intellij-community,asedunov/intellij-community,diorcety/intellij-community,adedayo/intellij-community,SerCeMan/intellij-community,blademainer/intellij-community,kool79/intellij-community,nicolargo/intellij-community,holmes/intellij-community,ol-loginov/intellij-community,xfournet/intellij-community,fitermay/intellij-community,ibinti/intellij-community,ftomassetti/intellij-community,semonte/intellij-community,izonder/intellij-community,supersven/intellij-community,jagguli/intellij-community,dslomov/intellij-community,holmes/intellij-community,robovm/robovm-studio,ThiagoGarciaAlves/intellij-community,samthor/intellij-community,clumsy/intellij-community,orekyuu/intellij-community,vladmm/intellij-community,lucafavatella/intellij-community,fitermay/intellij-community,ibinti/intellij-community,FHannes/intellij-community,hurricup/intellij-community,Distrotech/intellij-community,samthor/intellij-community,fengbaicanhe/intellij-community,xfournet/intellij-community,FHannes/intellij-community,Distrotech/intellij-community,kdwink/intellij-community,amith01994/intellij-community,da1z/intellij-community,mglukhikh/intellij-community,supersven/intellij-community,salguarnieri/intellij-community,jagguli/intellij-community,holmes/intellij-community,Distrotech/intellij-community,dslomov/intellij-community,ibinti/intellij-community,ivan-fedorov/intellij-community,fnouama/intellij-community,joewalnes/idea-community,tmpgit/intellij-community,da1z/intellij-community,ibinti/intellij-community,ol-loginov/intellij-community,orekyuu/intellij-community,petteyg/intellij-community,apixandru/intellij-community,ol-loginov/intellij-community,apixandru/intellij-community,izonder/intellij-community,xfournet/intellij-community,mglukhikh/intellij-community,SerCeMan/intellij-community,ThiagoGarciaAlves/intellij-community,salguarnieri/intellij-community,kool79/intellij-community,pwoodworth/intellij-community,tmpgit/intellij-community,SerCeMan/intellij-community,FHannes/intellij-community,vladmm/intellij-community,suncycheng/intellij-community,blademainer/intellij-community,holmes/intellij-community,ibinti/intellij-community,holmes/intellij-community,izonder/intellij-community,diorcety/intellij-community,FHannes/intellij-community,salguarnieri/intellij-community,robovm/robovm-studio,ernestp/consulo,clumsy/intellij-community,SerCeMan/intellij-community,muntasirsyed/intellij-community,youdonghai/intellij-community,signed/intellij-community,idea4bsd/idea4bsd,amith01994/intellij-community,salguarnieri/intellij-community,blademainer/intellij-community,vvv1559/intellij-community,fengbaicanhe/intellij-community,robovm/robovm-studio,alphafoobar/intellij-community,fnouama/intellij-community,youdonghai/intellij-community,signed/intellij-community,apixandru/intellij-community,MER-GROUP/intellij-community,suncycheng/intellij-community,orekyuu/intellij-community,FHannes/intellij-community,consulo/consulo,michaelgallacher/intellij-community,semonte/intellij-community,ThiagoGarciaAlves/intellij-community,ThiagoGarciaAlves/intellij-community,semonte/intellij-community,fitermay/intellij-community,holmes/intellij-community,xfournet/intellij-community,idea4bsd/idea4bsd,akosyakov/intellij-community,caot/intellij-community,salguarnieri/intellij-community,blademainer/intellij-community,muntasirsyed/intellij-community,suncycheng/intellij-community,
MichaelNedzelsky/intellij-community,kdwink/intellij-community,gnuhub/intellij-community,petteyg/intellij-community,michaelgallacher/intellij-community,michaelgallacher/intellij-community,amith01994/intellij-community,vladmm/intellij-community,fengbaicanhe/intellij-community,mglukhikh/intellij-community,izonder/intellij-community,SerCeMan/intellij-community,ol-loginov/intellij-community,petteyg/intellij-community,akosyakov/intellij-community,hurricup/intellij-community,ahb0327/intellij-community,supersven/intellij-community,semonte/intellij-community,salguarnieri/intellij-community,Distrotech/intellij-community,tmpgit/intellij-community,idea4bsd/idea4bsd,petteyg/intellij-community,youdonghai/intellij-community,da1z/intellij-community,samthor/intellij-community,ahb0327/intellij-community,MichaelNedzelsky/intellij-community,idea4bsd/idea4bsd,mglukhikh/intellij-community,hurricup/intellij-community,ivan-fedorov/intellij-community,michaelgallacher/intellij-community,blademainer/intellij-community,wreckJ/intellij-community,Lekanich/intellij-community,amith01994/intellij-community,asedunov/intellij-community,signed/intellij-community,allotria/intellij-community,pwoodworth/intellij-community,nicolargo/intellij-community,samthor/intellij-community,akosyakov/intellij-community,nicolargo/intellij-community,kool79/intellij-community,Distrotech/intellij-community,alphafoobar/intellij-community,ibinti/intellij-community,diorcety/intellij-community,da1z/intellij-community,asedunov/intellij-community,Distrotech/intellij-community,holmes/intellij-community,hurricup/intellij-community,izonder/intellij-community,ryano144/intellij-community,FHannes/intellij-community,jagguli/intellij-community,ahb0327/intellij-community,ThiagoGarciaAlves/intellij-community,asedunov/intellij-community,ivan-fedorov/intellij-community,samthor/intellij-community,vvv1559/intellij-community,orekyuu/intellij-community,dslomov/intellij-community,blademainer/intellij-community,orekyuu/intellij-community,ryano144/intellij-community,da1z/intellij-community,xfournet/intellij-community,retomerz/intellij-community,dslomov/intellij-community,vvv1559/intellij-community,muntasirsyed/intellij-community,kdwink/intellij-community,adedayo/intellij-community,ahb0327/intellij-community,pwoodworth/intellij-community,wreckJ/intellij-community,kool79/intellij-community,robovm/robovm-studio,allotria/intellij-community,ThiagoGarciaAlves/intellij-community,ThiagoGarciaAlves/intellij-community,da1z/intellij-community,lucafavatella/intellij-community,ThiagoGarciaAlves/intellij-community,signed/intellij-community,caot/intellij-community,fitermay/intellij-community,mglukhikh/intellij-community,lucafavatella/intellij-community,salguarnieri/intellij-community,fitermay/intellij-community,Distrotech/intellij-community,ernestp/consulo,izonder/intellij-community,caot/intellij-community,idea4bsd/idea4bsd,akosyakov/intellij-community,orekyuu/intellij-community,ahb0327/intellij-community,joewalnes/idea-community,mglukhikh/intellij-community,tmpgit/intellij-community,pwoodworth/intellij-community,fengbaicanhe/intellij-community,ahb0327/intellij-community,Distrotech/intellij-community,retomerz/intellij-community,mglukhikh/intellij-community,vladmm/intellij-community,asedunov/intellij-community,MER-GROUP/intellij-community,fnouama/intellij-community,caot/intellij-community,lucafavatella/intellij-community,retomerz/intellij-community,FHannes/intellij-community,vvv1559/intellij-community,muntasirsyed/intellij-community,blademainer/intellij-community,allotria/intellij-commu
nity,muntasirsyed/intellij-community,supersven/intellij-community,tmpgit/intellij-community,ol-loginov/intellij-community,idea4bsd/idea4bsd,michaelgallacher/intellij-community,suncycheng/intellij-community,samthor/intellij-community,slisson/intellij-community,robovm/robovm-studio,retomerz/intellij-community,amith01994/intellij-community,izonder/intellij-community,suncycheng/intellij-community,TangHao1987/intellij-community,ibinti/intellij-community,vvv1559/intellij-community,xfournet/intellij-community,Lekanich/intellij-community,orekyuu/intellij-community,wreckJ/intellij-community,vvv1559/intellij-community,vvv1559/intellij-community,tmpgit/intellij-community,salguarnieri/intellij-community,gnuhub/intellij-community,retomerz/intellij-community,Distrotech/intellij-community,robovm/robovm-studio,fengbaicanhe/intellij-community,MER-GROUP/intellij-community,fnouama/intellij-community,akosyakov/intellij-community,retomerz/intellij-community,ivan-fedorov/intellij-community,Lekanich/intellij-community,TangHao1987/intellij-community,allotria/intellij-community,akosyakov/intellij-community,semonte/intellij-community,slisson/intellij-community,pwoodworth/intellij-community,apixandru/intellij-community,slisson/intellij-community,gnuhub/intellij-community,gnuhub/intellij-community,asedunov/intellij-community,da1z/intellij-community,ryano144/intellij-community,ryano144/intellij-community,vladmm/intellij-community,ivan-fedorov/intellij-community,caot/intellij-community,ol-loginov/intellij-community,MichaelNedzelsky/intellij-community,ahb0327/intellij-community,pwoodworth/intellij-community,orekyuu/intellij-community,signed/intellij-community,ryano144/intellij-community,amith01994/intellij-community,holmes/intellij-community,xfournet/intellij-community,fengbaicanhe/intellij-community,amith01994/intellij-community,petteyg/intellij-community,diorcety/intellij-community,ol-loginov/intellij-community,ivan-fedorov/intellij-community,signed/intellij-community,retomerz/intellij-community,apixandru/intellij-community,ftomassetti/intellij-community,TangHao1987/intellij-community,consulo/consulo,MichaelNedzelsky/intellij-community,fitermay/intellij-community,jagguli/intellij-community,TangHao1987/intellij-community,vvv1559/intellij-community,retomerz/intellij-community,MER-GROUP/intellij-community,ivan-fedorov/intellij-community,MichaelNedzelsky/intellij-community,supersven/intellij-community,izonder/intellij-community,holmes/intellij-community,supersven/intellij-community,ftomassetti/intellij-community,caot/intellij-community,joewalnes/idea-community,wreckJ/intellij-community,alphafoobar/intellij-community,fitermay/intellij-community,kdwink/intellij-community,ahb0327/intellij-community,michaelgallacher/intellij-community,slisson/intellij-community,ryano144/intellij-community,clumsy/intellij-community,kdwink/intellij-community,wreckJ/intellij-community,asedunov/intellij-community,MichaelNedzelsky/intellij-community,clumsy/intellij-community,signed/intellij-community,youdonghai/intellij-community,ivan-fedorov/intellij-community,hurricup/intellij-community,kool79/intellij-community,joewalnes/idea-community,idea4bsd/idea4bsd,TangHao1987/intellij-community,suncycheng/intellij-community,jagguli/intellij-community,kdwink/intellij-community,fengbaicanhe/intellij-community,clumsy/intellij-community,kool79/intellij-community,akosyakov/intellij-community,Lekanich/intellij-community,signed/intellij-community,mglukhikh/intellij-community,supersven/intellij-community,ftomassetti/intellij-community,adedayo/intellij-community,tm
pgit/intellij-community,suncycheng/intellij-community,youdonghai/intellij-community,ryano144/intellij-community,adedayo/intellij-community,samthor/intellij-community,holmes/intellij-community,kdwink/intellij-community,apixandru/intellij-community,lucafavatella/intellij-community,MichaelNedzelsky/intellij-community,FHannes/intellij-community,hurricup/intellij-community,alphafoobar/intellij-community,asedunov/intellij-community,michaelgallacher/intellij-community,allotria/intellij-community,lucafavatella/intellij-community,MER-GROUP/intellij-community,TangHao1987/intellij-community,wreckJ/intellij-community,fitermay/intellij-community,muntasirsyed/intellij-community,ivan-fedorov/intellij-community,robovm/robovm-studio,tmpgit/intellij-community,fnouama/intellij-community,samthor/intellij-community,joewalnes/idea-community,MER-GROUP/intellij-community,dslomov/intellij-community,semonte/intellij-community,xfournet/intellij-community,kool79/intellij-community,ahb0327/intellij-community,akosyakov/intellij-community,jagguli/intellij-community,ThiagoGarciaAlves/intellij-community,consulo/consulo,retomerz/intellij-community,caot/intellij-community,ftomassetti/intellij-community,ivan-fedorov/intellij-community,lucafavatella/intellij-community,signed/intellij-community,alphafoobar/intellij-community,salguarnieri/intellij-community,joewalnes/idea-community,Distrotech/intellij-community,wreckJ/intellij-community,samthor/intellij-community,ernestp/consulo,MichaelNedzelsky/intellij-community,allotria/intellij-community,retomerz/intellij-community,kool79/intellij-community,caot/intellij-community,vvv1559/intellij-community,da1z/intellij-community,gnuhub/intellij-community,clumsy/intellij-community,hurricup/intellij-community,blademainer/intellij-community,ol-loginov/intellij-community,vladmm/intellij-community,semonte/intellij-community,da1z/intellij-community,robovm/robovm-studio,SerCeMan/intellij-community,fengbaicanhe/intellij-community,kool79/intellij-community,idea4bsd/idea4bsd,jagguli/intellij-community,youdonghai/intellij-community,fitermay/intellij-community,fnouama/intellij-community,youdonghai/intellij-community,petteyg/intellij-community,petteyg/intellij-community,ftomassetti/intellij-community,wreckJ/intellij-community,akosyakov/intellij-community,gnuhub/intellij-community,ibinti/intellij-community,ibinti/intellij-community,kdwink/intellij-community,vladmm/intellij-community,ftomassetti/intellij-community,TangHao1987/intellij-community,nicolargo/intellij-community,samthor/intellij-community,signed/intellij-community,vvv1559/intellij-community,amith01994/intellij-community,semonte/intellij-community,alphafoobar/intellij-community,lucafavatella/intellij-community,muntasirsyed/intellij-community,xfournet/intellij-community,signed/intellij-community,gnuhub/intellij-community,ernestp/consulo,orekyuu/intellij-community,consulo/consulo,kool79/intellij-community,slisson/intellij-community,gnuhub/intellij-community,vladmm/intellij-community,fnouama/intellij-community,ernestp/consulo,pwoodworth/intellij-community,da1z/intellij-community,Distrotech/intellij-community,nicolargo/intellij-community,Lekanich/intellij-community,fitermay/intellij-community,ol-loginov/intellij-community,joewalnes/idea-community,tmpgit/intellij-community,amith01994/intellij-community,ftomassetti/intellij-community,MichaelNedzelsky/intellij-community,TangHao1987/intellij-community,ahb0327/intellij-community,apixandru/intellij-community,ThiagoGarciaAlves/intellij-community,mglukhikh/intellij-community,nicolargo/intellij-community,ja
gguli/intellij-community,tmpgit/intellij-community,idea4bsd/idea4bsd,orekyuu/intellij-community,apixandru/intellij-community,TangHao1987/intellij-community,robovm/robovm-studio,vladmm/intellij-community,clumsy/intellij-community,TangHao1987/intellij-community,adedayo/intellij-community,MichaelNedzelsky/intellij-community,fnouama/intellij-community,orekyuu/intellij-community,retomerz/intellij-community,blademainer/intellij-community,mglukhikh/intellij-community,hurricup/intellij-community,ryano144/intellij-community,kool79/intellij-community,diorcety/intellij-community,alphafoobar/intellij-community,muntasirsyed/intellij-community,tmpgit/intellij-community,supersven/intellij-community,slisson/intellij-community,ThiagoGarciaAlves/intellij-community,gnuhub/intellij-community,pwoodworth/intellij-community,allotria/intellij-community,tmpgit/intellij-community,izonder/intellij-community,allotria/intellij-community,hurricup/intellij-community,dslomov/intellij-community,salguarnieri/intellij-community,fnouama/intellij-community,hurricup/intellij-community,MichaelNedzelsky/intellij-community,suncycheng/intellij-community,jagguli/intellij-community,MER-GROUP/intellij-community,SerCeMan/intellij-community,amith01994/intellij-community,joewalnes/idea-community,idea4bsd/idea4bsd,jagguli/intellij-community,apixandru/intellij-community,signed/intellij-community,youdonghai/intellij-community,fengbaicanhe/intellij-community,slisson/intellij-community,michaelgallacher/intellij-community,ibinti/intellij-community,petteyg/intellij-community,mglukhikh/intellij-community,asedunov/intellij-community,holmes/intellij-community,ThiagoGarciaAlves/intellij-community,blademainer/intellij-community,fengbaicanhe/intellij-community,asedunov/intellij-community,suncycheng/intellij-community,semonte/intellij-community,ol-loginov/intellij-community,clumsy/intellij-community,hurricup/intellij-community,amith01994/intellij-community,fitermay/intellij-community,petteyg/intellij-community,dslomov/intellij-community,ftomassetti/intellij-community,petteyg/intellij-community,lucafavatella/intellij-community,ftomassetti/intellij-community,da1z/intellij-community,xfournet/intellij-community,consulo/consulo,fengbaicanhe/intellij-community,fnouama/intellij-community,adedayo/intellij-community,retomerz/intellij-community,asedunov/intellij-community,diorcety/intellij-community,muntasirsyed/intellij-community,adedayo/intellij-community,SerCeMan/intellij-community,hurricup/intellij-community,suncycheng/intellij-community,ol-loginov/intellij-community,fitermay/intellij-community,xfournet/intellij-community,allotria/intellij-community,diorcety/intellij-community,amith01994/intellij-community,MER-GROUP/intellij-community,ol-loginov/intellij-community,caot/intellij-community,clumsy/intellij-community,da1z/intellij-community,fnouama/intellij-community,ryano144/intellij-community,apixandru/intellij-community,izonder/intellij-community,kdwink/intellij-community,orekyuu/intellij-community,semonte/intellij-community,blademainer/intellij-community,youdonghai/intellij-community,kool79/intellij-community,dslomov/intellij-community,michaelgallacher/intellij-community,FHannes/intellij-community,akosyakov/intellij-community,jagguli/intellij-community,semonte/intellij-community,wreckJ/intellij-community,diorcety/intellij-community,SerCeMan/intellij-community,TangHao1987/intellij-community,kdwink/intellij-community,izonder/intellij-community,jagguli/intellij-community,gnuhub/intellij-community,slisson/intellij-community,apixandru/intellij-community,adedayo/
intellij-community,nicolargo/intellij-community,ivan-fedorov/intellij-community,pwoodworth/intellij-community,samthor/intellij-community,MichaelNedzelsky/intellij-community,lucafavatella/intellij-community,kdwink/intellij-community,pwoodworth/intellij-community,ftomassetti/intellij-community,pwoodworth/intellij-community,salguarnieri/intellij-community,samthor/intellij-community,Lekanich/intellij-community,nicolargo/intellij-community,MER-GROUP/intellij-community,lucafavatella/intellij-community,Lekanich/intellij-community,idea4bsd/idea4bsd,MER-GROUP/intellij-community,dslomov/intellij-community,robovm/robovm-studio,vladmm/intellij-community,retomerz/intellij-community,wreckJ/intellij-community,slisson/intellij-community,nicolargo/intellij-community,signed/intellij-community,petteyg/intellij-community,clumsy/intellij-community,alphafoobar/intellij-community,lucafavatella/intellij-community,vladmm/intellij-community,suncycheng/intellij-community,vvv1559/intellij-community,Lekanich/intellij-community,nicolargo/intellij-community,SerCeMan/intellij-community,ibinti/intellij-community,slisson/intellij-community,apixandru/intellij-community,Lekanich/intellij-community,nicolargo/intellij-community,diorcety/intellij-community,adedayo/intellij-community,ryano144/intellij-community,ahb0327/intellij-community,robovm/robovm-studio,mglukhikh/intellij-community,asedunov/intellij-community,pwoodworth/intellij-community,ftomassetti/intellij-community,clumsy/intellij-community,ryano144/intellij-community,adedayo/intellij-community,SerCeMan/intellij-community,idea4bsd/idea4bsd,izonder/intellij-community,blademainer/intellij-community,gnuhub/intellij-community,lucafavatella/intellij-community,allotria/intellij-community,allotria/intellij-community,diorcety/intellij-community,supersven/intellij-community,apixandru/intellij-community,vvv1559/intellij-community,gnuhub/intellij-community,Lekanich/intellij-community,semonte/intellij-community,allotria/intellij-community,consulo/consulo,wreckJ/intellij-community,youdonghai/intellij-community,diorcety/intellij-community,caot/intellij-community,petteyg/intellij-community,caot/intellij-community,xfournet/intellij-community,supersven/intellij-community,ibinti/intellij-community,FHannes/intellij-community,FHannes/intellij-community,clumsy/intellij-community,alphafoobar/intellij-community,youdonghai/intellij-community,apixandru/intellij-community,salguarnieri/intellij-community,vladmm/intellij-community,alphafoobar/intellij-community,Lekanich/intellij-community,michaelgallacher/intellij-community,fitermay/intellij-community,suncycheng/intellij-community,alphafoobar/intellij-community,muntasirsyed/intellij-community,adedayo/intellij-community,robovm/robovm-studio,michaelgallacher/intellij-community,MER-GROUP/intellij-community,ryano144/intellij-community,supersven/intellij-community,nicolargo/intellij-community,holmes/intellij-community,asedunov/intellij-community,fengbaicanhe/intellij-community,allotria/intellij-community,youdonghai/intellij-community,dslomov/intellij-community,muntasirsyed/intellij-community,joewalnes/idea-community,ibinti/intellij-community,youdonghai/intellij-community,slisson/intellij-community,MER-GROUP/intellij-community,semonte/intellij-community,Distrotech/intellij-community,hurricup/intellij-community,FHannes/intellij-community,adedayo/intellij-community,akosyakov/intellij-community,ahb0327/intellij-community,kdwink/intellij-community,dslomov/intellij-community,TangHao1987/intellij-community,fnouama/intellij-community,alphafoobar/intellij-commun
ity,michaelgallacher/intellij-community,xfournet/intellij-community,slisson/intellij-community,da1z/intellij-community,muntasirsyed/intellij-community,wreckJ/intellij-community,mglukhikh/intellij-community,SerCeMan/intellij-community,supersven/intellij-community,akosyakov/intellij-community
/* * Copyright 2000-2009 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.refactoring.copy; import com.intellij.ide.util.DirectoryUtil; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.command.CommandProcessor; import com.intellij.openapi.fileChooser.FileChooserDescriptorFactory; import com.intellij.openapi.help.HelpManager; import com.intellij.openapi.project.Project; import com.intellij.openapi.ui.DialogWrapper; import com.intellij.openapi.ui.Messages; import com.intellij.openapi.ui.impl.DialogWrapperPeerImpl; import com.intellij.psi.PsiDirectory; import com.intellij.psi.PsiElement; import com.intellij.psi.PsiFile; import com.intellij.psi.PsiManager; import com.intellij.refactoring.RefactoringBundle; import com.intellij.ui.*; import com.intellij.util.IncorrectOperationException; import org.jetbrains.annotations.NonNls; import javax.swing.*; import javax.swing.event.DocumentEvent; import javax.swing.event.DocumentListener; import java.awt.*; import java.io.File; class CopyFilesOrDirectoriesDialog extends DialogWrapper{ private JLabel myInformationLabel; private EditorComboWithBrowseButton myTargetDirectoryField; private JTextField myNewNameField; private final Project myProject; private final boolean myShowDirectoryField; private final boolean myShowNewNameField; private PsiDirectory myTargetDirectory; @NonNls private static final String RECENT_KEYS = "CopyFile.RECENT_KEYS"; public CopyFilesOrDirectoriesDialog(PsiElement[] elements, PsiDirectory defaultTargetDirectory, Project project, boolean doClone) { super(project, true); myProject = project; myShowDirectoryField = !doClone; myShowNewNameField = elements.length == 1; if (doClone && elements.length != 1) { throw new IllegalArgumentException("wrong number of elements to clone: " + elements.length); } setTitle(doClone ? RefactoringBundle.message("copy.files.clone.title") : RefactoringBundle.message("copy.files.copy.title")); init(); if (elements.length == 1) { String text; if (elements[0] instanceof PsiFile) { PsiFile file = (PsiFile)elements[0]; text = doClone ? RefactoringBundle.message("copy.files.clone.file.0", file.getVirtualFile().getPresentableUrl()) : RefactoringBundle.message("copy.files.copy.file.0", file.getVirtualFile().getPresentableUrl()); final String fileName = file.getName(); myNewNameField.setText(fileName); final int dotIdx = fileName.lastIndexOf("."); if (dotIdx > -1) { myNewNameField.select(0, dotIdx); myNewNameField.putClientProperty(DialogWrapperPeerImpl.HAVE_INITIAL_SELECTION, true); } } else { PsiDirectory directory = (PsiDirectory)elements[0]; text = doClone ? 
RefactoringBundle.message("copy.files.clone.directory.0", directory.getVirtualFile().getPresentableUrl()) : RefactoringBundle.message("copy.files.copy.directory.0", directory.getVirtualFile().getPresentableUrl()); myNewNameField.setText(directory.getName()); } myInformationLabel.setText(text); } else { setMultipleElementCopyLabel(elements); } if (myShowDirectoryField) { myTargetDirectoryField.prependItem(defaultTargetDirectory == null ? "" : defaultTargetDirectory.getVirtualFile().getPresentableUrl()); } validateOKButton(); } private void setMultipleElementCopyLabel(PsiElement[] elements) { boolean allFiles = true; boolean allDirectories = true; for (PsiElement element : elements) { if (element instanceof PsiDirectory) { allFiles = false; } else { allDirectories = false; } } if (allFiles) { myInformationLabel.setText(RefactoringBundle.message("copy.files.copy.specified.files.label")); } else if (allDirectories) { myInformationLabel.setText(RefactoringBundle.message("copy.files.copy.specified.directories.label")); } else { myInformationLabel.setText(RefactoringBundle.message("copy.files.copy.specified.mixed.label")); } } protected Action[] createActions(){ return new Action[]{getOKAction(),getCancelAction(),getHelpAction()}; } public JComponent getPreferredFocusedComponent() { return myNewNameField; } protected JComponent createCenterPanel() { return new JPanel(new BorderLayout()); } protected JComponent createNorthPanel() { JPanel panel = new JPanel(new GridBagLayout()); panel.setBorder(IdeBorderFactory.createRoundedBorder()); myInformationLabel = new JLabel(); panel.add(myInformationLabel, new GridBagConstraints(0,0,2,1,1,0,GridBagConstraints.WEST,GridBagConstraints.HORIZONTAL,new Insets(4,8,4,8),0,0)); DocumentListener documentListener = new DocumentAdapter() { public void textChanged(DocumentEvent event) { validateOKButton(); } }; if (myShowNewNameField) { myNewNameField = new JTextField(); Dimension size = myNewNameField.getPreferredSize(); FontMetrics fontMetrics = myNewNameField.getFontMetrics(myNewNameField.getFont()); size.width = fontMetrics.charWidth('a') * 60; myNewNameField.setPreferredSize(size); panel.add(new JLabel(RefactoringBundle.message("copy.files.new.name.label")), new GridBagConstraints(0,1,1,1,0,0,GridBagConstraints.WEST,GridBagConstraints.HORIZONTAL,new Insets(4,8,4,8),0,0)); panel.add(myNewNameField, new GridBagConstraints(1,1,1,1,1,0,GridBagConstraints.WEST,GridBagConstraints.HORIZONTAL,new Insets(4,0,4,8),0,0)); myNewNameField.getDocument().addDocumentListener(documentListener); } if (myShowDirectoryField) { panel.add(new JLabel(RefactoringBundle.message("copy.files.to.directory.label")), new GridBagConstraints(0,2,1,1,0,0,GridBagConstraints.WEST,GridBagConstraints.HORIZONTAL,new Insets(4,8,4,8),0,0)); myTargetDirectoryField = new EditorComboWithBrowseButton(null, "", myProject, RECENT_KEYS); myTargetDirectoryField.addBrowseFolderListener(RefactoringBundle.message("select.target.directory"), RefactoringBundle.message("the.file.will.be.copied.to.this.directory"), myProject, FileChooserDescriptorFactory.createSingleFolderDescriptor(), EditorComboBox.COMPONENT_ACCESSOR); myTargetDirectoryField.setTextFieldPreferredWidth(60); panel.add(myTargetDirectoryField, new GridBagConstraints(1,2,1,1,1,0,GridBagConstraints.WEST,GridBagConstraints.HORIZONTAL,new Insets(4,0,4,8),0,0)); myTargetDirectoryField.getChildComponent().getDocument().addDocumentListener(new com.intellij.openapi.editor.event.DocumentAdapter() { @Override public void 
documentChanged(com.intellij.openapi.editor.event.DocumentEvent e) { validateOKButton(); } }); } return panel; } public PsiDirectory getTargetDirectory() { return myTargetDirectory; } public String getNewName() { return myNewNameField != null ? myNewNameField.getText().trim() : null; } protected void doOKAction(){ if (myShowNewNameField) { String newName = getNewName(); if (newName.length() == 0) { Messages.showMessageDialog(myProject, RefactoringBundle.message("no.new.name.specified"), RefactoringBundle.message("error.title"), Messages.getErrorIcon()); return; } } if (myShowDirectoryField) { final String targetDirectoryName = myTargetDirectoryField.getText(); if (targetDirectoryName.length() == 0) { Messages.showMessageDialog(myProject, RefactoringBundle.message("no.target.directory.specified"), RefactoringBundle.message("error.title"), Messages.getErrorIcon()); return; } RecentsManager.getInstance(myProject).registerRecentEntry(RECENT_KEYS, targetDirectoryName); CommandProcessor.getInstance().executeCommand(myProject, new Runnable() { public void run() { ApplicationManager.getApplication().runWriteAction(new Runnable() { public void run() { try { myTargetDirectory = DirectoryUtil.mkdirs(PsiManager.getInstance(myProject), targetDirectoryName.replace(File.separatorChar, '/')); } catch (IncorrectOperationException e) { } } }); } }, RefactoringBundle.message("create.directory"), null); if (myTargetDirectory == null) { Messages.showMessageDialog(myProject, RefactoringBundle.message("cannot.create.directory"), RefactoringBundle.message("error.title"), Messages.getErrorIcon()); return; } } super.doOKAction(); } private void validateOKButton() { if (myShowDirectoryField) { if (myTargetDirectoryField.getText().length() == 0) { setOKActionEnabled(false); return; } } if (myShowNewNameField) { if (getNewName().length() == 0) { setOKActionEnabled(false); return; } } setOKActionEnabled(true); } protected void doHelpAction() { HelpManager.getInstance().invokeHelp("refactoring.copyClass"); } }
platform/lang-impl/src/com/intellij/refactoring/copy/CopyFilesOrDirectoriesDialog.java
/* * Copyright 2000-2009 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.refactoring.copy; import com.intellij.ide.util.DirectoryUtil; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.command.CommandProcessor; import com.intellij.openapi.fileChooser.FileChooserDescriptorFactory; import com.intellij.openapi.help.HelpManager; import com.intellij.openapi.project.Project; import com.intellij.openapi.ui.DialogWrapper; import com.intellij.openapi.ui.Messages; import com.intellij.psi.PsiDirectory; import com.intellij.psi.PsiElement; import com.intellij.psi.PsiFile; import com.intellij.psi.PsiManager; import com.intellij.refactoring.RefactoringBundle; import com.intellij.ui.*; import com.intellij.util.IncorrectOperationException; import org.jetbrains.annotations.NonNls; import javax.swing.*; import javax.swing.event.DocumentEvent; import javax.swing.event.DocumentListener; import java.awt.*; import java.io.File; class CopyFilesOrDirectoriesDialog extends DialogWrapper{ private JLabel myInformationLabel; private EditorComboWithBrowseButton myTargetDirectoryField; private JTextField myNewNameField; private final Project myProject; private final boolean myShowDirectoryField; private final boolean myShowNewNameField; private PsiDirectory myTargetDirectory; @NonNls private static final String RECENT_KEYS = "CopyFile.RECENT_KEYS"; public CopyFilesOrDirectoriesDialog(PsiElement[] elements, PsiDirectory defaultTargetDirectory, Project project, boolean doClone) { super(project, true); myProject = project; myShowDirectoryField = !doClone; myShowNewNameField = elements.length == 1; if (doClone && elements.length != 1) { throw new IllegalArgumentException("wrong number of elements to clone: " + elements.length); } setTitle(doClone ? RefactoringBundle.message("copy.files.clone.title") : RefactoringBundle.message("copy.files.copy.title")); init(); if (elements.length == 1) { String text; if (elements[0] instanceof PsiFile) { PsiFile file = (PsiFile)elements[0]; text = doClone ? RefactoringBundle.message("copy.files.clone.file.0", file.getVirtualFile().getPresentableUrl()) : RefactoringBundle.message("copy.files.copy.file.0", file.getVirtualFile().getPresentableUrl()); myNewNameField.setText(file.getName()); } else { PsiDirectory directory = (PsiDirectory)elements[0]; text = doClone ? RefactoringBundle.message("copy.files.clone.directory.0", directory.getVirtualFile().getPresentableUrl()) : RefactoringBundle.message("copy.files.copy.directory.0", directory.getVirtualFile().getPresentableUrl()); myNewNameField.setText(directory.getName()); } myInformationLabel.setText(text); } else { setMultipleElementCopyLabel(elements); } if (myShowDirectoryField) { myTargetDirectoryField.prependItem(defaultTargetDirectory == null ? 
"" : defaultTargetDirectory.getVirtualFile().getPresentableUrl()); } validateOKButton(); } private void setMultipleElementCopyLabel(PsiElement[] elements) { boolean allFiles = true; boolean allDirectories = true; for (PsiElement element : elements) { if (element instanceof PsiDirectory) { allFiles = false; } else { allDirectories = false; } } if (allFiles) { myInformationLabel.setText(RefactoringBundle.message("copy.files.copy.specified.files.label")); } else if (allDirectories) { myInformationLabel.setText(RefactoringBundle.message("copy.files.copy.specified.directories.label")); } else { myInformationLabel.setText(RefactoringBundle.message("copy.files.copy.specified.mixed.label")); } } protected Action[] createActions(){ return new Action[]{getOKAction(),getCancelAction(),getHelpAction()}; } public JComponent getPreferredFocusedComponent() { return myNewNameField; } protected JComponent createCenterPanel() { return new JPanel(new BorderLayout()); } protected JComponent createNorthPanel() { JPanel panel = new JPanel(new GridBagLayout()); panel.setBorder(IdeBorderFactory.createRoundedBorder()); myInformationLabel = new JLabel(); panel.add(myInformationLabel, new GridBagConstraints(0,0,2,1,1,0,GridBagConstraints.WEST,GridBagConstraints.HORIZONTAL,new Insets(4,8,4,8),0,0)); DocumentListener documentListener = new DocumentAdapter() { public void textChanged(DocumentEvent event) { validateOKButton(); } }; if (myShowNewNameField) { myNewNameField = new JTextField(); Dimension size = myNewNameField.getPreferredSize(); FontMetrics fontMetrics = myNewNameField.getFontMetrics(myNewNameField.getFont()); size.width = fontMetrics.charWidth('a') * 60; myNewNameField.setPreferredSize(size); panel.add(new JLabel(RefactoringBundle.message("copy.files.new.name.label")), new GridBagConstraints(0,1,1,1,0,0,GridBagConstraints.WEST,GridBagConstraints.HORIZONTAL,new Insets(4,8,4,8),0,0)); panel.add(myNewNameField, new GridBagConstraints(1,1,1,1,1,0,GridBagConstraints.WEST,GridBagConstraints.HORIZONTAL,new Insets(4,0,4,8),0,0)); myNewNameField.getDocument().addDocumentListener(documentListener); } if (myShowDirectoryField) { panel.add(new JLabel(RefactoringBundle.message("copy.files.to.directory.label")), new GridBagConstraints(0,2,1,1,0,0,GridBagConstraints.WEST,GridBagConstraints.HORIZONTAL,new Insets(4,8,4,8),0,0)); myTargetDirectoryField = new EditorComboWithBrowseButton(null, "", myProject, RECENT_KEYS); myTargetDirectoryField.addBrowseFolderListener(RefactoringBundle.message("select.target.directory"), RefactoringBundle.message("the.file.will.be.copied.to.this.directory"), myProject, FileChooserDescriptorFactory.createSingleFolderDescriptor(), EditorComboBox.COMPONENT_ACCESSOR); myTargetDirectoryField.setTextFieldPreferredWidth(60); panel.add(myTargetDirectoryField, new GridBagConstraints(1,2,1,1,1,0,GridBagConstraints.WEST,GridBagConstraints.HORIZONTAL,new Insets(4,0,4,8),0,0)); myTargetDirectoryField.getChildComponent().getDocument().addDocumentListener(new com.intellij.openapi.editor.event.DocumentAdapter() { @Override public void documentChanged(com.intellij.openapi.editor.event.DocumentEvent e) { validateOKButton(); } }); } return panel; } public PsiDirectory getTargetDirectory() { return myTargetDirectory; } public String getNewName() { return myNewNameField != null ? 
myNewNameField.getText().trim() : null; } protected void doOKAction(){ if (myShowNewNameField) { String newName = getNewName(); if (newName.length() == 0) { Messages.showMessageDialog(myProject, RefactoringBundle.message("no.new.name.specified"), RefactoringBundle.message("error.title"), Messages.getErrorIcon()); return; } } if (myShowDirectoryField) { final String targetDirectoryName = myTargetDirectoryField.getText(); if (targetDirectoryName.length() == 0) { Messages.showMessageDialog(myProject, RefactoringBundle.message("no.target.directory.specified"), RefactoringBundle.message("error.title"), Messages.getErrorIcon()); return; } RecentsManager.getInstance(myProject).registerRecentEntry(RECENT_KEYS, targetDirectoryName); CommandProcessor.getInstance().executeCommand(myProject, new Runnable() { public void run() { ApplicationManager.getApplication().runWriteAction(new Runnable() { public void run() { try { myTargetDirectory = DirectoryUtil.mkdirs(PsiManager.getInstance(myProject), targetDirectoryName.replace(File.separatorChar, '/')); } catch (IncorrectOperationException e) { } } }); } }, RefactoringBundle.message("create.directory"), null); if (myTargetDirectory == null) { Messages.showMessageDialog(myProject, RefactoringBundle.message("cannot.create.directory"), RefactoringBundle.message("error.title"), Messages.getErrorIcon()); return; } } super.doOKAction(); } private void validateOKButton() { if (myShowDirectoryField) { if (myTargetDirectoryField.getText().length() == 0) { setOKActionEnabled(false); return; } } if (myShowNewNameField) { if (getNewName().length() == 0) { setOKActionEnabled(false); return; } } setOKActionEnabled(true); } protected void doHelpAction() { HelpManager.getInstance().invokeHelp("refactoring.copyClass"); } }
copy file: exclude file extension from selection (IDEA-61081)
platform/lang-impl/src/com/intellij/refactoring/copy/CopyFilesOrDirectoriesDialog.java
copy file: exclude file extension from selection (IDEA-61081)
<ide><path>latform/lang-impl/src/com/intellij/refactoring/copy/CopyFilesOrDirectoriesDialog.java <ide> import com.intellij.openapi.project.Project; <ide> import com.intellij.openapi.ui.DialogWrapper; <ide> import com.intellij.openapi.ui.Messages; <add>import com.intellij.openapi.ui.impl.DialogWrapperPeerImpl; <ide> import com.intellij.psi.PsiDirectory; <ide> import com.intellij.psi.PsiElement; <ide> import com.intellij.psi.PsiFile; <ide> text = doClone ? <ide> RefactoringBundle.message("copy.files.clone.file.0", file.getVirtualFile().getPresentableUrl()) : <ide> RefactoringBundle.message("copy.files.copy.file.0", file.getVirtualFile().getPresentableUrl()); <del> myNewNameField.setText(file.getName()); <add> final String fileName = file.getName(); <add> myNewNameField.setText(fileName); <add> final int dotIdx = fileName.lastIndexOf("."); <add> if (dotIdx > -1) { <add> myNewNameField.select(0, dotIdx); <add> myNewNameField.putClientProperty(DialogWrapperPeerImpl.HAVE_INITIAL_SELECTION, true); <add> } <ide> } <ide> else { <ide> PsiDirectory directory = (PsiDirectory)elements[0];
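The change above addresses IDEA-61081: when copying a single file, the "new name" field now pre-selects only the base name, so typing a replacement keeps the file extension. The following is a standalone Swing sketch of the same selection idea; the file name and window title are made-up example values, and the IntelliJ-specific DialogWrapperPeerImpl.HAVE_INITIAL_SELECTION client property used in the real change is omitted.

// Standalone Swing sketch of the selection behaviour added above (IDEA-61081).
// File name and window title are made-up example values.
import javax.swing.JFrame;
import javax.swing.JTextField;
import javax.swing.SwingUtilities;

public class SelectBaseNameDemo {
    public static void main(String[] args) {
        SwingUtilities.invokeLater(new Runnable() {
            public void run() {
                JTextField newNameField = new JTextField(30);
                String fileName = "CopyFilesOrDirectoriesDialog.java";
                newNameField.setText(fileName);
                int dotIdx = fileName.lastIndexOf('.');
                if (dotIdx > -1) {
                    // Select only "CopyFilesOrDirectoriesDialog"; the ".java" suffix stays
                    // unselected, so typing a new name keeps the extension by default.
                    newNameField.select(0, dotIdx);
                }
                JFrame frame = new JFrame("Copy file name selection sketch");
                frame.add(newNameField);
                frame.pack();
                frame.setDefaultCloseOperation(JFrame.DISPOSE_ON_CLOSE);
                frame.setVisible(true);
                newNameField.requestFocusInWindow();
            }
        });
    }
}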
Java
apache-2.0
b899a04c73d7f55875f7b4ab9d5a88953ba44ba8
0
JoelMarcey/buck
/* * Copyright (c) Facebook, Inc. and its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.buck.downwardapi.processexecutor; import static com.facebook.buck.downward.model.EventTypeMessage.EventType.CHROME_TRACE_EVENT; import static com.facebook.buck.downward.model.EventTypeMessage.EventType.CONSOLE_EVENT; import static com.facebook.buck.downward.model.EventTypeMessage.EventType.LOG_EVENT; import static com.facebook.buck.downward.model.EventTypeMessage.EventType.STEP_EVENT; import static com.facebook.buck.downward.model.StepEvent.StepStatus.FINISHED; import static com.facebook.buck.downward.model.StepEvent.StepStatus.STARTED; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; import com.facebook.buck.core.util.log.Logger; import com.facebook.buck.downward.model.ChromeTraceEvent; import com.facebook.buck.downward.model.ChromeTraceEvent.ChromeTraceEventStatus; import com.facebook.buck.downward.model.ConsoleEvent; import com.facebook.buck.downward.model.EventTypeMessage; import com.facebook.buck.downward.model.LogEvent; import com.facebook.buck.downward.model.LogLevel; import com.facebook.buck.downward.model.StepEvent; import com.facebook.buck.downwardapi.protocol.DownwardProtocol; import com.facebook.buck.downwardapi.protocol.DownwardProtocolType; import com.facebook.buck.event.BuckEvent; import com.facebook.buck.event.BuckEventBus; import com.facebook.buck.event.BuckEventBusForTests; import com.facebook.buck.event.SimplePerfEvent; import com.facebook.buck.event.external.events.StepEventExternalInterface; import com.facebook.buck.io.namedpipes.NamedPipe; import com.facebook.buck.io.namedpipes.NamedPipeFactory; import com.facebook.buck.testutil.TestLogSink; import com.facebook.buck.util.ConsoleParams; import com.facebook.buck.util.FakeProcess; import com.facebook.buck.util.FakeProcessExecutor; import com.facebook.buck.util.ProcessExecutorParams; import com.facebook.buck.util.Verbosity; import com.facebook.buck.util.timing.SettableFakeClock; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; import com.google.common.eventbus.Subscribe; import com.google.protobuf.AbstractMessage; import com.google.protobuf.Duration; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.OutputStream; import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Paths; import java.time.Instant; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import java.util.logging.Level; import java.util.logging.LogRecord; import org.junit.Rule; import org.junit.Test; public class DownwardApiProcessExecutorTest { 
private static final Logger LOG = Logger.get(DownwardApiProcessExecutorTest.class); private static final String TEST_LOGGER_NAME = "crazy.tool.name"; private static final ConsoleParams CONSOLE_PARAMS = ConsoleParams.of(false, Verbosity.STANDARD_INFORMATION); private static final String TEST_COMMAND = "test_command"; private static final String TEST_ACTION_ID = "test_action_id"; @Rule public TestLogSink logSink = new TestLogSink(TEST_LOGGER_NAME); private static class TestListener { private final AtomicInteger counter = new AtomicInteger(-1); private final Map<Integer, BuckEvent> events = new HashMap<>(); @Subscribe public void console(com.facebook.buck.event.ConsoleEvent event) { handleEvent(event); } @Subscribe public void chromeTrace(SimplePerfEvent event) { handleEvent(event); } @Subscribe public void step(com.facebook.buck.step.StepEvent event) { handleEvent(event); } private void handleEvent(BuckEvent event) { events.put(counter.incrementAndGet(), event); } } @Test public void downwardApi() throws IOException, InterruptedException { NamedPipe namedPipe = NamedPipeFactory.getFactory().create(); TestListener listener = new TestListener(); Instant instant = Instant.now(); long epochSecond = instant.getEpochSecond(); BuckEventBus buckEventBus = BuckEventBusForTests.newInstance( new SettableFakeClock(instant.toEpochMilli(), instant.getNano())); buckEventBus.register(listener); ImmutableMap.Builder<String, String> envsBuilder = ImmutableMap.builder(); envsBuilder.put("SOME_ENV1", "VALUE1"); envsBuilder.put("SOME_ENV2", "VALUE2"); envsBuilder.put("BUCK_VERBOSITY", CONSOLE_PARAMS.getVerbosity()); envsBuilder.put("BUCK_ANSI_ENABLED", CONSOLE_PARAMS.isAnsiEscapeSequencesEnabled()); envsBuilder.put("BUCK_BUILD_UUID", buckEventBus.getBuildId().toString()); envsBuilder.put("BUCK_ACTION_ID", TEST_ACTION_ID); envsBuilder.put("BUCK_EVENT_PIPE", namedPipe.getName()); ImmutableMap<String, String> envs = envsBuilder.build(); ProcessExecutorParams params = ProcessExecutorParams.builder() .setCommand(ImmutableList.of(TEST_COMMAND)) .setEnvironment(envs) .build(); FakeProcess fakeProcess = new FakeProcess( Optional.of( () -> { try { process(namedPipe); } catch (Exception e) { throw new RuntimeException(e); } return Optional.empty(); })); ImmutableMap.Builder<ProcessExecutorParams, FakeProcess> fakeProcessesBuilder = ImmutableMap.<ProcessExecutorParams, FakeProcess>builder().put(params, fakeProcess); FakeProcessExecutor fakeProcessExecutor = new FakeProcessExecutor(fakeProcessesBuilder.build()); DownwardApiProcessExecutor processExecutor = new DownwardApiProcessExecutor( fakeProcessExecutor, CONSOLE_PARAMS, buckEventBus, TEST_ACTION_ID, () -> namedPipe); ProcessExecutorParams executorParams = ProcessExecutorParams.ofCommand(TEST_COMMAND) .withEnvironment( ImmutableMap.of( "SOME_ENV1", "VALUE1", "BUCK_BUILD_UUID", "TO_BE_REPLACED", "BUCK_ACTION_ID", "TO_BE_REPLACED", "SOME_ENV2", "VALUE2")); processExecutor.launchAndExecute(executorParams); Map<Integer, BuckEvent> events = listener.events; assertEquals(events.size(), 5); long currentThreadId = Thread.currentThread().getId(); for (BuckEvent buckEvent : events.values()) { assertEquals( "Thread id for events has to be equals to thread id of the invoking thread. 
Failed event: " + buckEvent, currentThreadId, buckEvent.getThreadId()); } // step start event verifyStepEvent( epochSecond, events.get(0), StepEventExternalInterface.STEP_STARTED, "crazy_stuff", "launched_process step started", 50); // console event verifyConsoleEvent(events.get(1)); // chrome trace start event verifyChromeTraceEvent( epochSecond, events.get(2), "category_1", SimplePerfEvent.Type.STARTED, ImmutableMap.of("key1", "value1", "key2", "value2"), 100); // step finished event verifyStepEvent( epochSecond, events.get(3), StepEventExternalInterface.STEP_FINISHED, "crazy_stuff", // the same as in started event "launched_process step started", 55); // chrome trace finished event verifyChromeTraceEvent( epochSecond, events.get(4), "category_1", SimplePerfEvent.Type.FINISHED, ImmutableMap.of("key3", "value3"), 150); // log event verifyLogEvent(); assertFalse("Named pipe file has to be deleted!", Files.exists(Paths.get(namedPipe.getName()))); } private void verifyLogEvent() { List<LogRecord> records = logSink.getRecords(); LogRecord logRecord = Iterables.getOnlyElement(records); assertThat(logRecord.getLevel(), equalTo(Level.WARNING)); assertThat( logRecord, TestLogSink.logRecordWithMessage(containsString("log message! show me to user!!!!"))); } private void verifyConsoleEvent(BuckEvent consoleEvent) { assertTrue(consoleEvent instanceof com.facebook.buck.event.ConsoleEvent); com.facebook.buck.event.ConsoleEvent buckConsoleEvent = (com.facebook.buck.event.ConsoleEvent) consoleEvent; assertEquals("console message! show me to user!!!!", buckConsoleEvent.getMessage()); assertEquals(Level.INFO, buckConsoleEvent.getLevel()); } private void verifyStepEvent( long epochSecond, BuckEvent buckEvent, String eventName, String category, String description, int expectedRelativeDuration) { assertTrue(buckEvent instanceof com.facebook.buck.step.StepEvent); com.facebook.buck.step.StepEvent event = (com.facebook.buck.step.StepEvent) buckEvent; assertEquals(eventName, event.getEventName()); assertEquals(category, event.getCategory()); assertEquals(description, event.getDescription()); long nanoTime = event.getNanoTime(); verifyDuration(epochSecond, expectedRelativeDuration, nanoTime); } private void verifyChromeTraceEvent( long epochSecond, BuckEvent chromeTraceEvent, String category, SimplePerfEvent.Type type, ImmutableMap<String, Object> attributes, int expectedRelativeTime) { assertTrue(chromeTraceEvent instanceof SimplePerfEvent); SimplePerfEvent simplePerfEvent = (SimplePerfEvent) chromeTraceEvent; assertEquals(category, simplePerfEvent.getCategory()); assertEquals(type, simplePerfEvent.getEventType()); assertEquals(attributes, simplePerfEvent.getEventInfo()); verifyDuration(epochSecond, expectedRelativeTime, simplePerfEvent.getNanoTime()); } private void verifyDuration(long epochSecond, int expectedRelativeDuration, long nanoTime) { long eventTimeInSeconds = TimeUnit.NANOSECONDS.toSeconds(nanoTime); long relativeTimeInSeconds = eventTimeInSeconds - epochSecond; int diffInSeconds = (int) (relativeTimeInSeconds - expectedRelativeDuration); int diffThreshold = 2; assertTrue( "Diff in seconds: " + diffInSeconds + " should be less than threshold: " + diffThreshold, diffInSeconds <= diffThreshold); } private void process(NamedPipe namedPipe) throws IOException, InterruptedException { try (OutputStream outputStream = namedPipe.getOutputStream()) { List<String> messages = getJsonRepresentationOfMessages(); for (String message : messages) { LOG.info("Writing into named pipe: %s%s", System.lineSeparator(), 
message); outputStream.write(message.getBytes(StandardCharsets.UTF_8)); TimeUnit.MILLISECONDS.sleep(100); } } } private ImmutableList<String> getJsonRepresentationOfMessages() throws IOException { DownwardProtocolType protocolType = DownwardProtocolType.JSON; DownwardProtocol downwardProtocol = protocolType.getDownwardProtocol(); ImmutableList.Builder<String> builder = ImmutableList.builder(); ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); protocolType.writeDelimitedTo(outputStream); builder.add(outputStream.toString(StandardCharsets.UTF_8.name())); builder.add(stepEvent(downwardProtocol, STARTED, "launched_process step started", 50)); builder.add(consoleEvent(downwardProtocol)); builder.add(logEvent(downwardProtocol)); builder.add( chromeTraceEvent( downwardProtocol, ChromeTraceEventStatus.BEGIN, "category_1", 100, ImmutableMap.of("key1", "value1", "key2", "value2"))); builder.add(stepEvent(downwardProtocol, FINISHED, "launched_process step finished", 55)); builder.add( chromeTraceEvent( downwardProtocol, ChromeTraceEventStatus.END, "category_123", 150, ImmutableMap.of("key3", "value3"))); return builder.build(); } private String stepEvent( DownwardProtocol downwardProtocol, StepEvent.StepStatus started, String description, long durationSeconds) throws IOException { StepEvent stepEvent = StepEvent.newBuilder() .setEventId(123) .setStepStatus(started) .setStepType("crazy_stuff") .setDescription(description) .setDuration(Duration.newBuilder().setSeconds(durationSeconds).setNanos(10).build()) .build(); return write(downwardProtocol, STEP_EVENT, stepEvent); } private String consoleEvent(DownwardProtocol downwardProtocol) throws IOException { ConsoleEvent consoleEvent = ConsoleEvent.newBuilder() .setLogLevel(LogLevel.INFO) .setMessage("console message! show me to user!!!!") .build(); return write(downwardProtocol, CONSOLE_EVENT, consoleEvent); } private String logEvent(DownwardProtocol downwardProtocol) throws IOException { LogEvent logEvent = LogEvent.newBuilder() .setLogLevel(LogLevel.WARN) .setLoggerName("crazy.tool.name") .setMessage("log message! show me to user!!!!") .build(); return write(downwardProtocol, LOG_EVENT, logEvent); } private String chromeTraceEvent( DownwardProtocol downwardProtocol, ChromeTraceEventStatus status, String category, int relativeSeconds, ImmutableMap<String, String> attributes) throws IOException { ChromeTraceEvent chromeTraceEvent = ChromeTraceEvent.newBuilder() .setEventId(789) .setCategory(category) .setStatus(status) .setDuration(Duration.newBuilder().setSeconds(relativeSeconds).setNanos(10).build()) .putAllData(attributes) .build(); return write(downwardProtocol, CHROME_TRACE_EVENT, chromeTraceEvent); } private String write( DownwardProtocol downwardProtocol, EventTypeMessage.EventType eventType, AbstractMessage message) throws IOException { EventTypeMessage typeMessage = EventTypeMessage.newBuilder().setEventType(eventType).build(); ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); downwardProtocol.write(typeMessage, outputStream); downwardProtocol.write(message, outputStream); return outputStream.toString(StandardCharsets.UTF_8.name()); } }
test/com/facebook/buck/downwardapi/processexecutor/DownwardApiProcessExecutorTest.java
/* * Copyright (c) Facebook, Inc. and its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.buck.downwardapi.processexecutor; import static com.facebook.buck.downward.model.EventTypeMessage.EventType.CHROME_TRACE_EVENT; import static com.facebook.buck.downward.model.EventTypeMessage.EventType.CONSOLE_EVENT; import static com.facebook.buck.downward.model.EventTypeMessage.EventType.LOG_EVENT; import static com.facebook.buck.downward.model.EventTypeMessage.EventType.STEP_EVENT; import static com.facebook.buck.downward.model.StepEvent.StepStatus.FINISHED; import static com.facebook.buck.downward.model.StepEvent.StepStatus.STARTED; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; import com.facebook.buck.core.util.log.Logger; import com.facebook.buck.downward.model.ChromeTraceEvent; import com.facebook.buck.downward.model.ChromeTraceEvent.ChromeTraceEventStatus; import com.facebook.buck.downward.model.ConsoleEvent; import com.facebook.buck.downward.model.EventTypeMessage; import com.facebook.buck.downward.model.LogEvent; import com.facebook.buck.downward.model.LogLevel; import com.facebook.buck.downward.model.StepEvent; import com.facebook.buck.downwardapi.protocol.DownwardProtocol; import com.facebook.buck.downwardapi.protocol.DownwardProtocolType; import com.facebook.buck.event.BuckEvent; import com.facebook.buck.event.BuckEventBus; import com.facebook.buck.event.BuckEventBusForTests; import com.facebook.buck.event.SimplePerfEvent; import com.facebook.buck.event.external.events.StepEventExternalInterface; import com.facebook.buck.io.namedpipes.NamedPipe; import com.facebook.buck.io.namedpipes.NamedPipeFactory; import com.facebook.buck.testutil.TestLogSink; import com.facebook.buck.util.ConsoleParams; import com.facebook.buck.util.FakeProcess; import com.facebook.buck.util.FakeProcessExecutor; import com.facebook.buck.util.ProcessExecutorParams; import com.facebook.buck.util.Verbosity; import com.facebook.buck.util.timing.SettableFakeClock; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; import com.google.common.eventbus.Subscribe; import com.google.protobuf.AbstractMessage; import com.google.protobuf.Duration; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.OutputStream; import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Paths; import java.time.Instant; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import java.util.logging.Level; import java.util.logging.LogRecord; import org.junit.Rule; import org.junit.Test; public class DownwardApiProcessExecutorTest { 
private static final Logger LOG = Logger.get(DownwardApiProcessExecutorTest.class); private static final String TEST_LOGGER_NAME = "crazy.tool.name"; private static final ConsoleParams CONSOLE_PARAMS = ConsoleParams.of(false, Verbosity.STANDARD_INFORMATION); private static final String TEST_COMMAND = "test_command"; private static final String TEST_ACTION_ID = "test_action_id"; @Rule public TestLogSink logSink = new TestLogSink(TEST_LOGGER_NAME); private static class TestListener { private final AtomicInteger counter = new AtomicInteger(-1); private final Map<Integer, BuckEvent> events = new HashMap<>(); @Subscribe public void console(com.facebook.buck.event.ConsoleEvent event) { handleEvent(event); } @Subscribe public void chromeTrace(SimplePerfEvent event) { handleEvent(event); } @Subscribe public void step(com.facebook.buck.step.StepEvent event) { handleEvent(event); } private void handleEvent(BuckEvent event) { events.put(counter.incrementAndGet(), event); } } @Test public void downwardApi() throws IOException, InterruptedException { NamedPipe namedPipe = NamedPipeFactory.getFactory().create(); TestListener listener = new TestListener(); Instant instant = Instant.now(); long epochSecond = instant.getEpochSecond(); BuckEventBus buckEventBus = BuckEventBusForTests.newInstance( new SettableFakeClock(instant.toEpochMilli(), instant.getNano())); buckEventBus.register(listener); ImmutableMap.Builder<String, String> envsBuilder = ImmutableMap.builder(); envsBuilder.put("SOME_ENV1", "VALUE1"); envsBuilder.put("SOME_ENV2", "VALUE2"); envsBuilder.put("BUCK_VERBOSITY", CONSOLE_PARAMS.getVerbosity()); envsBuilder.put("BUCK_ANSI_ENABLED", CONSOLE_PARAMS.isAnsiEscapeSequencesEnabled()); envsBuilder.put("BUCK_BUILD_UUID", buckEventBus.getBuildId().toString()); envsBuilder.put("BUCK_ACTION_ID", TEST_ACTION_ID); envsBuilder.put("BUCK_EVENT_PIPE", namedPipe.getName()); ImmutableMap<String, String> envs = envsBuilder.build(); ProcessExecutorParams params = ProcessExecutorParams.builder() .setCommand(ImmutableList.of(TEST_COMMAND)) .setEnvironment(envs) .build(); FakeProcess fakeProcess = new FakeProcess( Optional.of( () -> { try { process(namedPipe); } catch (Exception e) { throw new RuntimeException(e); } return Optional.empty(); })); ImmutableMap.Builder<ProcessExecutorParams, FakeProcess> fakeProcessesBuilder = ImmutableMap.<ProcessExecutorParams, FakeProcess>builder().put(params, fakeProcess); FakeProcessExecutor fakeProcessExecutor = new FakeProcessExecutor(fakeProcessesBuilder.build()); DownwardApiProcessExecutor processExecutor = new DownwardApiProcessExecutor( fakeProcessExecutor, CONSOLE_PARAMS, buckEventBus, TEST_ACTION_ID, () -> namedPipe); ProcessExecutorParams executorParams = ProcessExecutorParams.ofCommand(TEST_COMMAND) .withEnvironment( ImmutableMap.of( "SOME_ENV1", "VALUE1", "BUCK_BUILD_UUID", "TO_BE_REPLACED", "BUCK_ACTION_ID", "TO_BE_REPLACED", "SOME_ENV2", "VALUE2")); processExecutor.launchAndExecute(executorParams); Map<Integer, BuckEvent> events = listener.events; assertEquals(events.size(), 5); long currentThreadId = Thread.currentThread().getId(); for (BuckEvent buckEvent : events.values()) { assertEquals( "Thread id for events has to be equals to thread id of the invoking thread. 
Failed event: " + buckEvent, currentThreadId, buckEvent.getThreadId()); } // step start event verifyStepEvent( epochSecond, events.get(0), StepEventExternalInterface.STEP_STARTED, "crazy_stuff", "launched_process step started", 50); // console event verifyConsoleEvent(events.get(1)); // chrome trace start event verifyChromeTraceEvent( epochSecond, events.get(2), "category_1", SimplePerfEvent.Type.STARTED, ImmutableMap.of("key1", "value1", "key2", "value2"), 100); // step finished event verifyStepEvent( epochSecond, events.get(3), StepEventExternalInterface.STEP_FINISHED, "crazy_stuff", // the same as in started event "launched_process step started", 55); // chrome trace finished event verifyChromeTraceEvent( epochSecond, events.get(4), "category_1", SimplePerfEvent.Type.FINISHED, ImmutableMap.of("key3", "value3"), 150); // log event verifyLogEvent(); assertFalse("Named pipe file has to be deleted!", Files.exists(Paths.get(namedPipe.getName()))); } private void verifyLogEvent() { List<LogRecord> records = logSink.getRecords(); LogRecord logRecord = Iterables.getOnlyElement(records); assertThat(logRecord.getLevel(), equalTo(Level.WARNING)); assertThat( logRecord, TestLogSink.logRecordWithMessage(containsString("log message! show me to user!!!!"))); } private void verifyConsoleEvent(BuckEvent consoleEvent) { assertTrue(consoleEvent instanceof com.facebook.buck.event.ConsoleEvent); com.facebook.buck.event.ConsoleEvent buckConsoleEvent = (com.facebook.buck.event.ConsoleEvent) consoleEvent; assertEquals("console message! show me to user!!!!", buckConsoleEvent.getMessage()); assertEquals(Level.INFO, buckConsoleEvent.getLevel()); } private void verifyStepEvent( long epochSecond, BuckEvent buckEvent, String eventName, String category, String description, int expectedRelativeDuration) { assertTrue(buckEvent instanceof com.facebook.buck.step.StepEvent); com.facebook.buck.step.StepEvent event = (com.facebook.buck.step.StepEvent) buckEvent; assertEquals(eventName, event.getEventName()); assertEquals(category, event.getCategory()); assertEquals(description, event.getDescription()); long nanoTime = event.getNanoTime(); verifyDuration(epochSecond, expectedRelativeDuration, nanoTime); } private void verifyChromeTraceEvent( long epochSecond, BuckEvent chromeTraceEvent, String category, SimplePerfEvent.Type type, ImmutableMap<String, Object> attributes, int expectedRelativeTime) { assertTrue(chromeTraceEvent instanceof SimplePerfEvent); SimplePerfEvent simplePerfEvent = (SimplePerfEvent) chromeTraceEvent; assertEquals(category, simplePerfEvent.getCategory()); assertEquals(type, simplePerfEvent.getEventType()); assertEquals(attributes, simplePerfEvent.getEventInfo()); verifyDuration(epochSecond, expectedRelativeTime, simplePerfEvent.getNanoTime()); } private void verifyDuration(long epochSecond, int expectedRelativeDuration, long nanoTime) { long eventTimeInSeconds = TimeUnit.NANOSECONDS.toSeconds(nanoTime); long relativeTimeInSeconds = eventTimeInSeconds - epochSecond; assertTrue(relativeTimeInSeconds - expectedRelativeDuration <= 1); } private void process(NamedPipe namedPipe) throws IOException, InterruptedException { try (OutputStream outputStream = namedPipe.getOutputStream()) { List<String> messages = getJsonRepresentationOfMessages(); for (String message : messages) { LOG.info("Writing into named pipe: %s%s", System.lineSeparator(), message); outputStream.write(message.getBytes(StandardCharsets.UTF_8)); TimeUnit.MILLISECONDS.sleep(100); } } } private ImmutableList<String> 
getJsonRepresentationOfMessages() throws IOException { DownwardProtocolType protocolType = DownwardProtocolType.JSON; DownwardProtocol downwardProtocol = protocolType.getDownwardProtocol(); ImmutableList.Builder<String> builder = ImmutableList.builder(); ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); protocolType.writeDelimitedTo(outputStream); builder.add(outputStream.toString(StandardCharsets.UTF_8.name())); builder.add(stepEvent(downwardProtocol, STARTED, "launched_process step started", 50)); builder.add(consoleEvent(downwardProtocol)); builder.add(logEvent(downwardProtocol)); builder.add( chromeTraceEvent( downwardProtocol, ChromeTraceEventStatus.BEGIN, "category_1", 100, ImmutableMap.of("key1", "value1", "key2", "value2"))); builder.add(stepEvent(downwardProtocol, FINISHED, "launched_process step finished", 55)); builder.add( chromeTraceEvent( downwardProtocol, ChromeTraceEventStatus.END, "category_123", 150, ImmutableMap.of("key3", "value3"))); return builder.build(); } private String stepEvent( DownwardProtocol downwardProtocol, StepEvent.StepStatus started, String description, long durationSeconds) throws IOException { StepEvent stepEvent = StepEvent.newBuilder() .setEventId(123) .setStepStatus(started) .setStepType("crazy_stuff") .setDescription(description) .setDuration(Duration.newBuilder().setSeconds(durationSeconds).setNanos(10).build()) .build(); return write(downwardProtocol, STEP_EVENT, stepEvent); } private String consoleEvent(DownwardProtocol downwardProtocol) throws IOException { ConsoleEvent consoleEvent = ConsoleEvent.newBuilder() .setLogLevel(LogLevel.INFO) .setMessage("console message! show me to user!!!!") .build(); return write(downwardProtocol, CONSOLE_EVENT, consoleEvent); } private String logEvent(DownwardProtocol downwardProtocol) throws IOException { LogEvent logEvent = LogEvent.newBuilder() .setLogLevel(LogLevel.WARN) .setLoggerName("crazy.tool.name") .setMessage("log message! show me to user!!!!") .build(); return write(downwardProtocol, LOG_EVENT, logEvent); } private String chromeTraceEvent( DownwardProtocol downwardProtocol, ChromeTraceEventStatus status, String category, int relativeSeconds, ImmutableMap<String, String> attributes) throws IOException { ChromeTraceEvent chromeTraceEvent = ChromeTraceEvent.newBuilder() .setEventId(789) .setCategory(category) .setStatus(status) .setDuration(Duration.newBuilder().setSeconds(relativeSeconds).setNanos(10).build()) .putAllData(attributes) .build(); return write(downwardProtocol, CHROME_TRACE_EVENT, chromeTraceEvent); } private String write( DownwardProtocol downwardProtocol, EventTypeMessage.EventType eventType, AbstractMessage message) throws IOException { EventTypeMessage typeMessage = EventTypeMessage.newBuilder().setEventType(eventType).build(); ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); downwardProtocol.write(typeMessage, outputStream); downwardProtocol.write(message, outputStream); return outputStream.toString(StandardCharsets.UTF_8.name()); } }
DownwardApiProcessExecutorTest fix: Increase duration threshold from 1s to 2s Summary: ^ + More readable error message. Reviewed By: swgillespie shipit-source-id: 6832ea9c9ee453734307f512a2e51bc14492be3d
test/com/facebook/buck/downwardapi/processexecutor/DownwardApiProcessExecutorTest.java
DownwardApiProcessExecutorTest fix: Increase duration threshold from 1s to 2s
<ide><path>est/com/facebook/buck/downwardapi/processexecutor/DownwardApiProcessExecutorTest.java <ide> private void verifyDuration(long epochSecond, int expectedRelativeDuration, long nanoTime) { <ide> long eventTimeInSeconds = TimeUnit.NANOSECONDS.toSeconds(nanoTime); <ide> long relativeTimeInSeconds = eventTimeInSeconds - epochSecond; <del> assertTrue(relativeTimeInSeconds - expectedRelativeDuration <= 1); <add> int diffInSeconds = (int) (relativeTimeInSeconds - expectedRelativeDuration); <add> int diffThreshold = 2; <add> assertTrue( <add> "Diff in seconds: " + diffInSeconds + " should be less than threshold: " + diffThreshold, <add> diffInSeconds <= diffThreshold); <ide> } <ide> <ide> private void process(NamedPipe namedPipe) throws IOException, InterruptedException {
Java
apache-2.0
09d54f9bb0783942a4fd538b846aad4d1c050777
0
googlegsa/manager.v3,googlegsa/manager.v3,googlegsa/manager.v3
// Copyright 2006 Google Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.enterprise.connector.spi; import com.google.common.collect.ImmutableMap; import java.util.Map; /** * Non-instantiable class that holds constants used by the SPI and * documents their meanings. * <p> * All constants whose names begin with PROPNAME are reserved names for * properties that may be accessed from a Document returned as a query result. * The actual values of these property name constants all begin with "google:". * For future compatibility, all property names beginning with "google:" are * reserved. */ public class SpiConstants { private SpiConstants() { // prevents instantiation } /** * The prefix for the reserved property names. * <p> * Value: "google:" * * @since 2.6.6 */ public static final String RESERVED_PROPNAME_PREFIX = "google:"; /** * Identifies a single-valued, string property that uniquely identifies a * document to this connector. The internal structure of this string is * opaque to the Search Appliance. Only printable, non-whitespace, ASCII * characters are permitted in a DOCID. * <p> * This property is required on all Documents. The connector implementor is * encouraged to implement this by using the natural ID in the foreign * repository. * <p> * This property is persistable (it is one of the keys in the * {@link #PERSISTABLE_ATTRIBUTES} map. * <p> * Value: google:docid */ public static final String PROPNAME_DOCID = "google:docid"; /** * Identifies a single-valued, date property that gives the last modified * date of a document. This property is optional but strongly recommended in * order to associate a specific date to the document. * <p> * Value: google:lastmodify */ public static final String PROPNAME_LASTMODIFIED = "google:lastmodified"; /** * Identifies an optional string property that is the title of the document. * This value is useful for providing a title for documents that supply no * content, or for which a title cannot be automatically extracted from the * supplied content. * <p> * Value: google:title */ public static final String PROPNAME_TITLE = "google:title"; /** * Reserved for future use. * <p> * Value: google:contenturl */ public static final String PROPNAME_CONTENTURL = "google:contenturl"; /** * Identifies a single-valued FeedType property that, if present, will be * used to determine the feed type for this document. It is strongly * recommended that this property be set to explicitly determine the feed * type ('content' or 'web') for the document. * <p> * If this property is not set, the feed type will be determined as follows: * <ol> * <li>If there is no {@link #PROPNAME_SEARCHURL} then the feed type will * default to 'content' feed using a fabricated URL derived from the * {@link #PROPNAME_DOCID}. * <li>If there is a {@link #PROPNAME_SEARCHURL} then the feed type will * default to 'web' feed and use the {@link #PROPNAME_SEARCHURL} as the * document URL. 
* </ol> * <p> * Value: google:feedtype * * @since 2.4.2 */ public static final String PROPNAME_FEEDTYPE = "google:feedtype"; /** * Identifies a single-valued Feed ID property that, if present, will be * used to identify the feed file that contains a fed document. All feed * records in a single feed file will share a common google:feedid value. * <p> * Reserved for internal use. * <p> * Value: google:feedid * * @since 2.6 */ public static final String PROPNAME_FEEDID = "google:feedid"; /** * Identifies an optional single-valued string property that, if present, * will be used by the Search Appliance as the primary URI for this document * - instead of the normal googleconnector:// URI which the connector manager * fabricates based on the {@link #PROPNAME_DOCID} and the connector name. * <p> * Value: google:searchurl */ public static final String PROPNAME_SEARCHURL = "google:searchurl"; /** * Identifies a single-valued property that may be either string or * binary and gives direct access to the primary content to be indexed. * <p> * Value: google:content */ public static final String PROPNAME_CONTENT = "google:content"; /** * Identifies a single-valued string property that serves as a security * token. At serve time, the Search Appliance presents this token along * with the querying user's identity, and the connector tells us whether * this user has permission to view a document of this class. This may be * implemented by a textual pointer to an ACL. * <p> * Value: google:securitytoken */ public static final String PROPNAME_SECURITYTOKEN = "google:securitytoken"; /** * Identifies an single-valued String property that gives the mime type * for the content of this document. If this is not supplied, then the * system will use the value of DEFAULT_MIMETYPE. * <p> * Value: google:mimetype */ public static final String PROPNAME_MIMETYPE = "google:mimetype"; /** * The mime type that the connector manager uses as a default, if a * document does not specify. * <p> * Value: text/html */ public static final String DEFAULT_MIMETYPE = "text/html"; /** * Identifies an optional, single-valued property that gives a URL that * should be used in a results page as the primary user reference for a * document. This may be different from the contenturl, if present: * contenturl should give direct access to the content file, whereas * displayurl may point into the CMS's web front-end application. * <p> * Value: google:displayurl */ public static final String PROPNAME_DISPLAYURL = "google:displayurl"; /** * Unless this property is present and is false, then the document will * be marked as public. * <p> * Value: google:ispublic */ public static final String PROPNAME_ISPUBLIC = "google:ispublic"; /** * Identifies a multiple-valued String property that gives the list of * group ACL Scope IDs that are permitted RoleType.READER access to this * document. If either of the PROPNAME_ACLGROUPS or PROPNAME_ACLUSERS * properties are non-null, then the GSA will grant or deny access to this * document for a given user on the basis of whether the user's name appears * as one of the Scope IDs in the PROPNAME_ACLUSERS list or one of the user's * groups appears as one of the Scope IDs in the PROPNAME_ACLGROUPS list. * <p> * ACL Scope ID is a group or user name within the scope of the Connector. * <p> * To specify more than just RoleType.READER access to the document, the * Connector must add additional multi-value role properties to the document. 
* These entries are of the form: * * <pre> * Name = &lt;GROUP_ROLES_PROPNAME_PREFIX&gt; + &lt;scopeId&gt; * Value = [RoleType[, ...]] * </pre> * * where &lt;GROUP_ROLES_PROPNAME_PREFIX&gt; is the * {@link #GROUP_ROLES_PROPNAME_PREFIX}, &lt;scopeId&gt; is the group ACL * Scope ID, and RoleType is one of the possible RoleType values. User ACL * Roles are of the form: * * <pre> * Name = &lt;USER_ROLES_PROPNAME_PREFIX&gt; + &lt;scopeId&gt; * Value = [RoleType[, ...]] * </pre> * * where the &lt;scopeId&gt; will be the user ACL Scope ID. * <p> * If the PROPNAME_ISPUBLIC is missing or is true, then this property is * ignored, since the document is public. * <p> * If both the PROPNAME_ACLGROUPS and PROPNAME_ACLUSERS properties are null or * empty, then the GSA will use the authorization SPI to grant or deny access * to this document. * <p> * The GSA may be configured to bypass on-board authorization, in which case * these properties will be ignored, and the GSA will use the authorization * SPI to grant or deny access to this document. * <p> * Value: google:aclgroups */ public static final String PROPNAME_ACLGROUPS = "google:aclgroups"; /** * Identifies a multiple-valued String property that gives the list of * users that are permitted access to this document. For details, see * the {@link #PROPNAME_ACLGROUPS}. * <p> * Value: google:aclusers */ public static final String PROPNAME_ACLUSERS = "google:aclusers"; /** * Prefix added to the front of the group ACL Scope ID when creating a group * roles property name. If the Connector wants to define specific roles * associated with a group ACL Scope ID related to a document they should be * stored in a multi-valued property named: * * <pre> * GROUP_ROLES_PROPNAME_PREFIX + &lt;scopeId&gt; * </pre> * * For example, given a group ACL Entry of "eng=reader,writer" the roles for * "eng" would be stored in a property as follows: * * <pre> * Name = "google:group:roles:eng" * Value = [reader, writer] * </pre> */ public static final String GROUP_ROLES_PROPNAME_PREFIX = "google:group:roles:"; /** * Prefix added to the front of the user ACL Scope ID when creating a user * roles property name. If the Connector wants to define specific roles * associated with a user ACL Scope ID related to a document they should be * stored in a multi-valued property named: * * <pre> * USER_ROLES_PROPNAME_PREFIX + &lt;scopeId&gt; * </pre> * * For example, given a user ACL Entry of "joe=reader,writer" the roles for * "joe" would be stored in a property as follows: * * <pre> * Name = "google:user:roles:joe" * Value = [reader, writer] * </pre> */ public static final String USER_ROLES_PROPNAME_PREFIX = "google:user:roles:"; /** * Identifies an optional, single-valued property that specifies the action * associated with the document. If not specified, then the system will * not specify the action and the default behavior will be observed. * <p> * Value: google:action */ public static final String PROPNAME_ACTION = "google:action"; /** * Identifies an optional, multi-valued property that specifies the * folder path of the document. The document name should not be * included in the path. Multiple values are permitted to support * repositories that link documents to multiple parent folders. 
* <p> * Examples: * * <pre> * /ENGINEERING/techdoc/pdfs * Enterprise:Marketing:Press Releases * https://sp.example.com/sites/mylist * </pre> * <p> * Value: google:folder * * @see "RFC 3986: Uniform Resource Identifier (URI): Generic Syntax" * @since 2.6.6 */ public static final String PROPNAME_FOLDER = "google:folder"; /** * Identifies an optional, single-valued boolean property that specifies * whether the document should be locked, to prevent it from being evicted * if the GSA reaches its license limit. Default: {@code false}. * <p/> * Note: this property will not be indexed, it only controls whether the GSA * will lock the document. * <p/> * Value: google:lock * * @see "<a href='http://code.google.com/apis/searchappliance/documentation/62/feedsguide.html#defining_the_xml'>Defining the XML Record for a Document</a>" * @since 2.6.4 */ public static final String PROPNAME_LOCK = "google:lock"; /** * Enum for the list of possible feed types. * * @since 2.4.2 */ public enum FeedType { CONTENT, WEB } /** * Enum for action types. */ public enum ActionType { ADD("add"), DELETE("delete"), ERROR("error"), SKIPPED("skipped"); private final String tag; ActionType(String m) { tag = m; } /** * @return The enum matching the given <code>tag</code>. * <code>ActionType.ERROR</code> will be returned if the given * <code>tag</code> does not match a known <code>ActionType</code>. */ public static ActionType findActionType(String tag) { try { return Enum.valueOf(ActionType.class, tag.toUpperCase()); } catch (IllegalArgumentException e) { // Not found, return ERROR. return ERROR; } } @Override public String toString() { return tag; } } /** * Enum for known role types. */ public enum RoleType { PEEKER("peeker"), READER("reader"), WRITER("writer"), OWNER("owner"), ERROR("error"); private final String tag; RoleType(String m) { tag = m; } /** * @return The enum matching the given <code>tag</code>. * <code>RoleType.ERROR</code> will be returned if the given * <code>tag</code> does not match a known <code>RoleType</code>. */ public static RoleType findRoleType(String tag) { try { return Enum.valueOf(RoleType.class, tag.toUpperCase()); } catch (IllegalArgumentException e) { // Not found, return ERROR. return ERROR; } } @Override public String toString() { return tag; } } /** * This enumeration identifies databases. */ public enum DatabaseType { OTHER("other"), ORACLE("oracle"), SQLSERVER("sqlserver"), H2("h2"), MYSQL("mysql"), ; private final String tag; private DatabaseType(String tag) { this.tag = tag; } /** * @return The enum matching the given {@code tag}, or * {@code OTHER} if a match is not found. */ public static DatabaseType findDatabaseType(String tag) { try { return Enum.valueOf(DatabaseType.class, tag.toUpperCase()); } catch (IllegalArgumentException e) { // Not found, return OTHER. return OTHER; } } @Override public String toString() { return tag; } } /** * A map keyed by property names, giving corresponding column names. If a * property name is a key in this map, then it can be persisted by the * Connector * Manager in its per-document store. The associated value gives the name that * a connector implementor should use to read records from the * per-document store using jdbc. However, implementors are encouraged to use * {@link LocalDocumentStore} methods rather than jdbc if possible. 
* <p/> * At present, the persistable attributes are: * <ul> * <li>{@link #PROPNAME_CONNECTOR_INSTANCE}</li> * <li>{@link #PROPNAME_CONNECTOR_TYPE}</li> * <li>{@link #PROPNAME_DOCID}</li> * <li>{@link #PROPNAME_FEEDID}</li> * <li>{@link #PROPNAME_PRIMARY_FOLDER}</li> * <li>{@link #PROPNAME_ACTION}</li> * <li>{@link #PROPNAME_TIMESTAMP}</li> * <li>{@link #PROPNAME_MESSAGE}</li> * <li>{@link #PROPNAME_SNAPSHOT}</li> * <li>{@link #PROPNAME_CONTAINER}</li> * <li>{@link #PROPNAME_PERSISTED_CUSTOMDATA_1}</li> * <li>{@link #PROPNAME_PERSISTED_CUSTOMDATA_2}</li> * </ul> */ public static final Map<String, String> PERSISTABLE_ATTRIBUTES; /** * Optional single-valued, boolean property that marks this document as one * the Connector Manager should persist locally in its per-document store. If * not present, this is assumed to be false. If true, then the Connector * Manager will persist all attributes that are keys in the * {@link #PERSISTABLE_ATTRIBUTES} map. */ public static final String PROPNAME_MANAGER_SHOULD_PERSIST = "google:persist"; /** * Reserved by the Connector Manager to indicate the connector instance * that submitted this document. Should not be supplied by the * connector developer, and if supplied, it will be ignored. * <p> * This property is persistable (it is one of the keys in the * {@link #PERSISTABLE_ATTRIBUTES} map. */ public static final String PROPNAME_CONNECTOR_INSTANCE = "google:connector_instance"; /** * Reserved by the Connector Manager to indicate type of the connector * that submitted this document. Should not be supplied by the * connector developer, and if supplied, it will be ignored. * <p> * This property is persistable (it is one of the keys in the * {@link #PERSISTABLE_ATTRIBUTES} map. */ public static final String PROPNAME_CONNECTOR_TYPE = "google:connector_type"; /** * Optional, single-valued property that gives the name primary folder in * which this document lives. If not supplied, but the * {@link #PROPNAME_FOLDER} property is supplied, then the first value of * that multi-valued property will be used here. The primary use-case of this * attribute is to be stored, so that a connector can later query to find * all documents in a folder. * <p> * This property is persistable (it is one of the keys in the * {@link #PERSISTABLE_ATTRIBUTES} map. */ public static final String PROPNAME_PRIMARY_FOLDER = "google:primary_folder"; /** * Reserved by the Connector Manager to indicate the time at which the * Connector Manager handled this document. Should not be supplied by the * connector developer, and if supplied, it will be ignored. * <p> * This property is persistable (it is one of the keys in the * {@link #PERSISTABLE_ATTRIBUTES} map. */ public static final String PROPNAME_TIMESTAMP = "google:timestamp"; /** * Optional, single-valued property that gives a message from the connector * instance about the state of this document. * <p> * This property is persistable (it is one of the keys in the * {@link #PERSISTABLE_ATTRIBUTES} map. */ public static final String PROPNAME_MESSAGE = "google:message"; /** * Optional, single-valued property that gives a compact representation of a * document's content and attributes, to enable a quick comparison with a * foreign repository to see if the document has changed. For example, this * attribute might contain a content hash. The primary use-case of this * attribute is to be stored in the Connector Manager's per-document store. It * will not be supplied when sending a document to the GSA for indexing. 
* <p> * This property is persistable (it is one of the keys in the * {@link #PERSISTABLE_ATTRIBUTES} map. */ public static final String PROPNAME_SNAPSHOT = "google:snapshot"; /** * Optional, single-valued property that gives the name of the high-level * container object * in which the document lives. This may be an object such as a cabinet or * list. * The primary use-case of this * attribute is to be stored, so that a connector can later query to find * all documents in a container. * <p> * This property is persistable (it is one of the keys in the * {@link #PERSISTABLE_ATTRIBUTES} map. */ public static final String PROPNAME_CONTAINER = "google:container"; /** * Optional, single-valued property the Connector Manager will persist in its * per-document store. This property will not be supplied when sending a * document to the GSA for indexing. */ public static final String PROPNAME_PERSISTED_CUSTOMDATA_1 = "google:custom1"; /** * Optional, single-valued property the Connector Manager will persist in its * per-document store. This property will not be supplied when sending a * document to the GSA for indexing. */ public static final String PROPNAME_PERSISTED_CUSTOMDATA_2 = "google:custom2"; static { PERSISTABLE_ATTRIBUTES = ImmutableMap.<String, String> builder(). put(PROPNAME_DOCID, "docid"). put(PROPNAME_FEEDID, "feedid"). put(PROPNAME_PRIMARY_FOLDER, "folderparent"). put(PROPNAME_ACTION, "action"). put(PROPNAME_TIMESTAMP, "timestamp"). put(PROPNAME_MESSAGE, "message"). put(PROPNAME_SNAPSHOT, "snapshot"). put(PROPNAME_CONTAINER, "container"). put(PROPNAME_PERSISTED_CUSTOMDATA_1, "custom1"). put(PROPNAME_PERSISTED_CUSTOMDATA_2, "custom2"). build(); } }
projects/connector-manager/source/java/com/google/enterprise/connector/spi/SpiConstants.java
// Copyright 2006 Google Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.enterprise.connector.spi; import com.google.common.collect.ImmutableMap; import java.util.Map; /** * Non-instantiable class that holds constants used by the SPI and * documents their meanings. * <p> * All constants whose names begin with PROPNAME are reserved names for * properties that may be accessed from a Document returned as a query result. * The actual values of these property name constants all begin with "google:". * For future compatibility, all property names beginning with "google:" are * reserved. */ public class SpiConstants { private SpiConstants() { // prevents instantiation } /** * The prefix for the reserved property names. * <p> * Value: "google:" * * @since 3.0 */ public static final String RESERVED_PROPNAME_PREFIX = "google:"; /** * Identifies a single-valued, string property that uniquely identifies a * document to this connector. The internal structure of this string is * opaque to the Search Appliance. Only printable, non-whitespace, ASCII * characters are permitted in a DOCID. * <p> * This property is required on all Documents. The connector implementor is * encouraged to implement this by using the natural ID in the foreign * repository. * <p> * This property is persistable (it is one of the keys in the * {@link #PERSISTABLE_ATTRIBUTES} map. * <p> * Value: google:docid */ public static final String PROPNAME_DOCID = "google:docid"; /** * Identifies a single-valued, date property that gives the last modified * date of a document. This property is optional but strongly recommended in * order to associate a specific date to the document. * <p> * Value: google:lastmodify */ public static final String PROPNAME_LASTMODIFIED = "google:lastmodified"; /** * Identifies an optional string property that is the title of the document. * This value is useful for providing a title for documents that supply no * content, or for which a title cannot be automatically extracted from the * supplied content. * <p> * Value: google:title */ public static final String PROPNAME_TITLE = "google:title"; /** * Reserved for future use. * <p> * Value: google:contenturl */ public static final String PROPNAME_CONTENTURL = "google:contenturl"; /** * Identifies a single-valued FeedType property that, if present, will be * used to determine the feed type for this document. It is strongly * recommended that this property be set to explicitly determine the feed * type ('content' or 'web') for the document. * <p> * If this property is not set, the feed type will be determined as follows: * <ol> * <li>If there is no {@link #PROPNAME_SEARCHURL} then the feed type will * default to 'content' feed using a fabricated URL derived from the * {@link #PROPNAME_DOCID}. * <li>If there is a {@link #PROPNAME_SEARCHURL} then the feed type will * default to 'web' feed and use the {@link #PROPNAME_SEARCHURL} as the * document URL. 
* </ol> * <p> * Value: google:feedtype * * @since 2.4.2 */ public static final String PROPNAME_FEEDTYPE = "google:feedtype"; /** * Identifies a single-valued Feed ID property that, if present, will be * used to identify the feed file that contains a fed document. All feed * records in a single feed file will share a common google:feedid value. * <p> * Reserved for internal use. * <p> * Value: google:feedid * * @since 2.6 */ public static final String PROPNAME_FEEDID = "google:feedid"; /** * Identifies an optional single-valued string property that, if present, * will be used by the Search Appliance as the primary URI for this document * - instead of the normal googleconnector:// URI which the connector manager * fabricates based on the {@link #PROPNAME_DOCID} and the connector name. * <p> * Value: google:searchurl */ public static final String PROPNAME_SEARCHURL = "google:searchurl"; /** * Identifies a single-valued property that may be either string or * binary and gives direct access to the primary content to be indexed. * <p> * Value: google:content */ public static final String PROPNAME_CONTENT = "google:content"; /** * Identifies a single-valued string property that serves as a security * token. At serve time, the Search Appliance presents this token along * with the querying user's identity, and the connector tells us whether * this user has permission to view a document of this class. This may be * implemented by a textual pointer to an ACL. * <p> * Value: google:securitytoken */ public static final String PROPNAME_SECURITYTOKEN = "google:securitytoken"; /** * Identifies an single-valued String property that gives the mime type * for the content of this document. If this is not supplied, then the * system will use the value of DEFAULT_MIMETYPE. * <p> * Value: google:mimetype */ public static final String PROPNAME_MIMETYPE = "google:mimetype"; /** * The mime type that the connector manager uses as a default, if a * document does not specify. * <p> * Value: text/html */ public static final String DEFAULT_MIMETYPE = "text/html"; /** * Identifies an optional, single-valued property that gives a URL that * should be used in a results page as the primary user reference for a * document. This may be different from the contenturl, if present: * contenturl should give direct access to the content file, whereas * displayurl may point into the CMS's web front-end application. * <p> * Value: google:displayurl */ public static final String PROPNAME_DISPLAYURL = "google:displayurl"; /** * Unless this property is present and is false, then the document will * be marked as public. * <p> * Value: google:ispublic */ public static final String PROPNAME_ISPUBLIC = "google:ispublic"; /** * Identifies a multiple-valued String property that gives the list of * group ACL Scope IDs that are permitted RoleType.READER access to this * document. If either of the PROPNAME_ACLGROUPS or PROPNAME_ACLUSERS * properties are non-null, then the GSA will grant or deny access to this * document for a given user on the basis of whether the user's name appears * as one of the Scope IDs in the PROPNAME_ACLUSERS list or one of the user's * groups appears as one of the Scope IDs in the PROPNAME_ACLGROUPS list. * <p> * ACL Scope ID is a group or user name within the scope of the Connector. * <p> * To specify more than just RoleType.READER access to the document, the * Connector must add additional multi-value role properties to the document. 
* These entries are of the form: * * <pre> * Name = &lt;GROUP_ROLES_PROPNAME_PREFIX&gt; + &lt;scopeId&gt; * Value = [RoleType[, ...]] * </pre> * * where &lt;GROUP_ROLES_PROPNAME_PREFIX&gt; is the * {@link #GROUP_ROLES_PROPNAME_PREFIX}, &lt;scopeId&gt; is the group ACL * Scope ID, and RoleType is one of the possible RoleType values. User ACL * Roles are of the form: * * <pre> * Name = &lt;USER_ROLES_PROPNAME_PREFIX&gt; + &lt;scopeId&gt; * Value = [RoleType[, ...]] * </pre> * * where the &lt;scopeId&gt; will be the user ACL Scope ID. * <p> * If the PROPNAME_ISPUBLIC is missing or is true, then this property is * ignored, since the document is public. * <p> * If both the PROPNAME_ACLGROUPS and PROPNAME_ACLUSERS properties are null or * empty, then the GSA will use the authorization SPI to grant or deny access * to this document. * <p> * The GSA may be configured to bypass on-board authorization, in which case * these properties will be ignored, and the GSA will use the authorization * SPI to grant or deny access to this document. * <p> * Value: google:aclgroups */ public static final String PROPNAME_ACLGROUPS = "google:aclgroups"; /** * Identifies a multiple-valued String property that gives the list of * users that are permitted access to this document. For details, see * the {@link #PROPNAME_ACLGROUPS}. * <p> * Value: google:aclusers */ public static final String PROPNAME_ACLUSERS = "google:aclusers"; /** * Prefix added to the front of the group ACL Scope ID when creating a group * roles property name. If the Connector wants to define specific roles * associated with a group ACL Scope ID related to a document they should be * stored in a multi-valued property named: * * <pre> * GROUP_ROLES_PROPNAME_PREFIX + &lt;scopeId&gt; * </pre> * * For example, given a group ACL Entry of "eng=reader,writer" the roles for * "eng" would be stored in a property as follows: * * <pre> * Name = "google:group:roles:eng" * Value = [reader, writer] * </pre> */ public static final String GROUP_ROLES_PROPNAME_PREFIX = "google:group:roles:"; /** * Prefix added to the front of the user ACL Scope ID when creating a user * roles property name. If the Connector wants to define specific roles * associated with a user ACL Scope ID related to a document they should be * stored in a multi-valued property named: * * <pre> * USER_ROLES_PROPNAME_PREFIX + &lt;scopeId&gt; * </pre> * * For example, given a user ACL Entry of "joe=reader,writer" the roles for * "joe" would be stored in a property as follows: * * <pre> * Name = "google:user:roles:joe" * Value = [reader, writer] * </pre> */ public static final String USER_ROLES_PROPNAME_PREFIX = "google:user:roles:"; /** * Identifies an optional, single-valued property that specifies the action * associated with the document. If not specified, then the system will * not specify the action and the default behavior will be observed. * <p> * Value: google:action */ public static final String PROPNAME_ACTION = "google:action"; /** * Identifies an optional, multi-valued property that specifies the * folder path of the document. The document name should not be * included in the path. Multiple values are permitted to support * repositories that link documents to multiple parent folders. 
* <p> * Examples: * * <pre> * /ENGINEERING/techdoc/pdfs * Enterprise:Marketing:Press Releases * https://sp.example.com/sites/mylist * </pre> * <p> * Value: google:folder * * @see "RFC 3986: Uniform Resource Identifier (URI): Generic Syntax" * @since 3.0 */ public static final String PROPNAME_FOLDER = "google:folder"; /** * Identifies an optional, single-valued boolean property that specifies * whether the document should be locked, to prevent it from being evicted * if the GSA reaches its license limit. Default: {@code false}. * <p/> * Note: this property will not be indexed, it only controls whether the GSA * will lock the document. * <p/> * Value: google:lock * * @see "<a href='http://code.google.com/apis/searchappliance/documentation/62/feedsguide.html#defining_the_xml'>Defining the XML Record for a Document</a>" * */ public static final String PROPNAME_LOCK = "google:lock"; /** * Enum for the list of possible feed types. * * @since 2.4.2 */ public enum FeedType { CONTENT, WEB } /** * Enum for action types. */ public enum ActionType { ADD("add"), DELETE("delete"), ERROR("error"), SKIPPED("skipped"); private final String tag; ActionType(String m) { tag = m; } /** * @return The enum matching the given <code>tag</code>. * <code>ActionType.ERROR</code> will be returned if the given * <code>tag</code> does not match a known <code>ActionType</code>. */ public static ActionType findActionType(String tag) { try { return Enum.valueOf(ActionType.class, tag.toUpperCase()); } catch (IllegalArgumentException e) { // Not found, return ERROR. return ERROR; } } @Override public String toString() { return tag; } } /** * Enum for known role types. */ public enum RoleType { PEEKER("peeker"), READER("reader"), WRITER("writer"), OWNER("owner"), ERROR("error"); private final String tag; RoleType(String m) { tag = m; } /** * @return The enum matching the given <code>tag</code>. * <code>RoleType.ERROR</code> will be returned if the given * <code>tag</code> does not match a known <code>RoleType</code>. */ public static RoleType findRoleType(String tag) { try { return Enum.valueOf(RoleType.class, tag.toUpperCase()); } catch (IllegalArgumentException e) { // Not found, return ERROR. return ERROR; } } @Override public String toString() { return tag; } } /** * This enumeration identifies databases. */ public enum DatabaseType { OTHER("other"), ORACLE("oracle"), SQLSERVER("sqlserver"), H2("h2"), MYSQL("mysql"), ; private final String tag; private DatabaseType(String tag) { this.tag = tag; } /** * @return The enum matching the given {@code tag}, or * {@code OTHER} if a match is not found. */ public static DatabaseType findDatabaseType(String tag) { try { return Enum.valueOf(DatabaseType.class, tag.toUpperCase()); } catch (IllegalArgumentException e) { // Not found, return OTHER. return OTHER; } } @Override public String toString() { return tag; } } /** * A map keyed by property names, giving corresponding column names. If a * property name is a key in this map, then it can be persisted by the * Connector * Manager in its per-document store. The associated value gives the name that * a connector implementor should use to read records from the * per-document store using jdbc. However, implementors are encouraged to use * {@link LocalDocumentStore} methods rather than jdbc if possible. 
* <p/> * At present, the persistable attributes are: * <ul> * <li>{@link #PROPNAME_CONNECTOR_INSTANCE}</li> * <li>{@link #PROPNAME_CONNECTOR_TYPE}</li> * <li>{@link #PROPNAME_DOCID}</li> * <li>{@link #PROPNAME_FEEDID}</li> * <li>{@link #PROPNAME_PRIMARY_FOLDER}</li> * <li>{@link #PROPNAME_ACTION}</li> * <li>{@link #PROPNAME_TIMESTAMP}</li> * <li>{@link #PROPNAME_MESSAGE}</li> * <li>{@link #PROPNAME_SNAPSHOT}</li> * <li>{@link #PROPNAME_CONTAINER}</li> * <li>{@link #PROPNAME_PERSISTED_CUSTOMDATA_1}</li> * <li>{@link #PROPNAME_PERSISTED_CUSTOMDATA_2}</li> * </ul> */ public static final Map<String, String> PERSISTABLE_ATTRIBUTES; /** * Optional single-valued, boolean property that marks this document as one * the Connector Manager should persist locally in its per-document store. If * not present, this is assumed to be false. If true, then the Connector * Manager will persist all attributes that are keys in the * {@link #PERSISTABLE_ATTRIBUTES} map. */ public static final String PROPNAME_MANAGER_SHOULD_PERSIST = "google:persist"; /** * Reserved by the Connector Manager to indicate the connector instance * that submitted this document. Should not be supplied by the * connector developer, and if supplied, it will be ignored. * <p> * This property is persistable (it is one of the keys in the * {@link #PERSISTABLE_ATTRIBUTES} map. */ public static final String PROPNAME_CONNECTOR_INSTANCE = "google:connector_instance"; /** * Reserved by the Connector Manager to indicate type of the connector * that submitted this document. Should not be supplied by the * connector developer, and if supplied, it will be ignored. * <p> * This property is persistable (it is one of the keys in the * {@link #PERSISTABLE_ATTRIBUTES} map. */ public static final String PROPNAME_CONNECTOR_TYPE = "google:connector_type"; /** * Optional, single-valued property that gives the name primary folder in * which this document lives. If not supplied, but the * {@link #PROPNAME_FOLDER} property is supplied, then the first value of * that multi-valued property will be used here. The primary use-case of this * attribute is to be stored, so that a connector can later query to find * all documents in a folder. * <p> * This property is persistable (it is one of the keys in the * {@link #PERSISTABLE_ATTRIBUTES} map. */ public static final String PROPNAME_PRIMARY_FOLDER = "google:primary_folder"; /** * Reserved by the Connector Manager to indicate the time at which the * Connector Manager handled this document. Should not be supplied by the * connector developer, and if supplied, it will be ignored. * <p> * This property is persistable (it is one of the keys in the * {@link #PERSISTABLE_ATTRIBUTES} map. */ public static final String PROPNAME_TIMESTAMP = "google:timestamp"; /** * Optional, single-valued property that gives a message from the connector * instance about the state of this document. * <p> * This property is persistable (it is one of the keys in the * {@link #PERSISTABLE_ATTRIBUTES} map. */ public static final String PROPNAME_MESSAGE = "google:message"; /** * Optional, single-valued property that gives a compact representation of a * document's content and attributes, to enable a quick comparison with a * foreign repository to see if the document has changed. For example, this * attribute might contain a content hash. The primary use-case of this * attribute is to be stored in the Connector Manager's per-document store. It * will not be supplied when sending a document to the GSA for indexing. 
* <p> * This property is persistable (it is one of the keys in the * {@link #PERSISTABLE_ATTRIBUTES} map. */ public static final String PROPNAME_SNAPSHOT = "google:snapshot"; /** * Optional, single-valued property that gives the name of the high-level * container object * in which the document lives. This may be an object such as a cabinet or * list. * The primary use-case of this * attribute is to be stored, so that a connector can later query to find * all documents in a container. * <p> * This property is persistable (it is one of the keys in the * {@link #PERSISTABLE_ATTRIBUTES} map. */ public static final String PROPNAME_CONTAINER = "google:container"; /** * Optional, single-valued property the Connector Manager will persist in its * per-document store. This property will not be supplied when sending a * document to the GSA for indexing. */ public static final String PROPNAME_PERSISTED_CUSTOMDATA_1 = "google:custom1"; /** * Optional, single-valued property the Connector Manager will persist in its * per-document store. This property will not be supplied when sending a * document to the GSA for indexing. */ public static final String PROPNAME_PERSISTED_CUSTOMDATA_2 = "google:custom2"; static { PERSISTABLE_ATTRIBUTES = ImmutableMap.<String, String> builder(). put(PROPNAME_DOCID, "docid"). put(PROPNAME_FEEDID, "feedid"). put(PROPNAME_PRIMARY_FOLDER, "folderparent"). put(PROPNAME_ACTION, "action"). put(PROPNAME_TIMESTAMP, "timestamp"). put(PROPNAME_MESSAGE, "message"). put(PROPNAME_SNAPSHOT, "snapshot"). put(PROPNAME_CONTAINER, "container"). put(PROPNAME_PERSISTED_CUSTOMDATA_1, "custom1"). put(PROPNAME_PERSISTED_CUSTOMDATA_2, "custom2"). build(); } }
Fix incorrect @since javadoc tags in SpiConstants
projects/connector-manager/source/java/com/google/enterprise/connector/spi/SpiConstants.java
Fix incorrect @since javadoc tags in SpiConstants
<ide><path>rojects/connector-manager/source/java/com/google/enterprise/connector/spi/SpiConstants.java <ide> * <p> <ide> * Value: "google:" <ide> * <del> * @since 3.0 <add> * @since 2.6.6 <ide> */ <ide> public static final String RESERVED_PROPNAME_PREFIX = "google:"; <ide> <ide> * Value: google:folder <ide> * <ide> * @see "RFC 3986: Uniform Resource Identifier (URI): Generic Syntax" <del> * @since 3.0 <add> * @since 2.6.6 <ide> */ <ide> public static final String PROPNAME_FOLDER = "google:folder"; <ide> <ide> * Value: google:lock <ide> * <ide> * @see "<a href='http://code.google.com/apis/searchappliance/documentation/62/feedsguide.html#defining_the_xml'>Defining the XML Record for a Document</a>" <del> * <add> * @since 2.6.4 <ide> */ <ide> public static final String PROPNAME_LOCK = "google:lock"; <ide>
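The SpiConstants record above defines tag-based enum lookups (findActionType, findRoleType, findDatabaseType) that match tags case-insensitively and fall back to ERROR or OTHER instead of throwing for unknown tags. A minimal sketch of that behaviour, assuming the SpiConstants class from this record is available on the classpath (the demo class name is illustrative):

```java
// Illustrative sketch only: assumes com.google.enterprise.connector.spi.SpiConstants
// from the record above is on the classpath.
import com.google.enterprise.connector.spi.SpiConstants.ActionType;
import com.google.enterprise.connector.spi.SpiConstants.DatabaseType;

public class SpiConstantsLookupDemo {
    public static void main(String[] args) {
        // Tags are matched via Enum.valueOf(tag.toUpperCase()), so case does not matter.
        ActionType add = ActionType.findActionType("add");        // -> ADD
        ActionType unknown = ActionType.findActionType("purge");  // no match -> ERROR
        DatabaseType db = DatabaseType.findDatabaseType("MySQL"); // -> MYSQL
        // toString() returns the lower-case tag, so this prints "add error mysql".
        System.out.println(add + " " + unknown + " " + db);
    }
}
```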
JavaScript
agpl-3.0
9e874ba5d449b4594a0b3278ab93017226458094
0
slash851/mwEmbed,slash851/mwEmbed,tanyaLibatter/mwEmbed,FlixMaster/mwEmbed,joanpuigsanz/mwEmbed,panda-os/mwEmbed,kaltura/mwEmbed,abaylis/mwEmbed,FlixMaster/mwEmbed,abaylis/mwEmbed,kaltura/mwEmbed,panda-os/mwEmbed,bordar/mwEmbed,abaylis/mwEmbed,abaylis/mwEmbed,panda-os/mwEmbed,tanyaLibatter/mwEmbed,alexmilk/mwEmbed,bordar/mwEmbed,alexmilk/mwEmbed,omridevk/mwEmbed,kaltura/mwEmbed,joanpuigsanz/mwEmbed,omridevk/mwEmbed,alexmilk/mwEmbed,omridevk/mwEmbed,panda-os/mwEmbed,FlixMaster/mwEmbed,tanyaLibatter/mwEmbed,joanpuigsanz/mwEmbed,alexmilk/mwEmbed,joanpuigsanz/mwEmbed,FlixMaster/mwEmbed,bordar/mwEmbed,tanyaLibatter/mwEmbed,slash851/mwEmbed,bordar/mwEmbed,slash851/mwEmbed,omridevk/mwEmbed,kaltura/mwEmbed
( function( mw, $ ) { "use strict"; mw.HtmlBinderHelper=function(element, $scope) { var internal = {}; var updaters = {}; return { bind: function () { var parser=function(originalText,updateHtml) { var matches = originalText.match(/{{(.*)}}/g); if (matches && matches.length > 0) { matches.forEach(function (match) { var name = match.slice(2, -2); var filterIndex=name.indexOf('|'); var filter; if (filterIndex>0) { var filterName=name.substring(filterIndex+1).trim(); name=name.substring(0,filterIndex).trim(); filter=$scope[filterName]; } var updater=function() { var value=internal[name]; if (!value) { value=""; } var newContent = originalText.replace(match,value); if (filter) { newContent=filter(newContent); } updateHtml(newContent); }; defineProp(name, updater); updater(); }); } }; var defineProp = function (name, updateHtml) { //copy from old defenition if (!internal.hasOwnProperty(name)) { internal[name] = $scope[name]; updaters[name] = []; } if (!$scope.hasOwnProperty(name)) { updaters[name] = []; var updater = updaters[name]; Object.defineProperty($scope, name, { get: function () { return internal[name]; }, set: function (newValue) { try { if (internal[name] !== newValue) { internal[name] = newValue; for (var i = 0; i < updater.length; i++) { updater[i](); } } } catch (e) { mw.log("exception in Object.defineProperty " + e.message + " " + e.stack); } } }); } updaters[name].push(updateHtml); }; $('*',element).each(function ($index, el) { var shouldShowValue=el.getAttribute("ng-show"); if (shouldShowValue) { var updateHtml=function() { var newContent = internal[shouldShowValue]; if (newContent) { el.style.display = "inherit"; } else { el.style.display = "none"; } }; defineProp(shouldShowValue, updateHtml); updateHtml(); } if(el.childNodes.length>1) { return; } var originalText = el.innerHTML; parser(originalText, function(newContent) { el.innerHTML = newContent; }); $(el.attributes).each(function (index, element) { //var elementName = element.name; var originalValue = element.value; parser(originalValue, function(newContent) { element.value = newContent; }); }); }); } } } } )( window.mw, window.jQuery );
modules/DebugInfo/resources/simpleBindingHelper.js
( function( mw, $ ) { "use strict"; mw.HtmlBinderHelper=function(element, $scope) { var internal = {}; var updaters = {}; return { bind: function () { var parser=function(originalText,updateHtml) { var matches = originalText.match(/{{(.*)}}/g); if (matches && matches.length > 0) { matches.forEach(function (match) { var name = match.slice(2, -2); var filterIndex=name.indexOf('|'); var filter; if (filterIndex>0) { var filterName=name.substring(filterIndex+1).trim(); name=name.substring(0,filterIndex).trim(); filter=$scope[filterName]; } var updater=function() { var value=internal[name]; if (!value) { value=""; } var newContent = originalText.replace(match,value); if (filter) { newContent=filter(newContent); } updateHtml(newContent); }; defineProp(name, updater); updater(); }); } }; var defineProp = function (name, updateHtml) { //copy from old defenition if (!internal.hasOwnProperty(name)) { internal[name] = $scope[name]; updaters[name] = []; } if (!$scope.hasOwnProperty(name)) { updaters[name] = []; var updater = updaters[name]; Object.defineProperty($scope, name, { get: function () { return internal[name]; }, set: function (newValue) { try { if (internal[name] !== newValue) { internal[name] = newValue; for (var i = 0; i < updater.length; i++) { updater[i](); } } } catch (e) { mw.log("exception in Object.defineProperty " + e.message + " " + e.stack); } } }); } updaters[name].push(updateHtml); }; $('*',element).each(function ($index, el) { var shouldShowValue=el.getAttribute("ng-show"); if (shouldShowValue) { var updateHtml=function() { var newContent = internal[shouldShowValue]; if (newContent) { el.style.display = "inherit"; } else { el.style.display = "none"; } }; defineProp(shouldShowValue, updateHtml); updateHtml(); } if(el.childNodes.length>1) { return; } var originalText = el.innerText; parser(originalText, function(newContent) { el.innerHTML = newContent; }); $(el.attributes).each(function (index, element) { //var elementName = element.name; var originalValue = element.value; parser(originalValue, function(newContent) { element.value = newContent; }); }); }); } } } } )( window.mw, window.jQuery );
fix safari
modules/DebugInfo/resources/simpleBindingHelper.js
fix safari
<ide><path>odules/DebugInfo/resources/simpleBindingHelper.js <ide> if(el.childNodes.length>1) { <ide> return; <ide> } <del> var originalText = el.innerText; <add> var originalText = el.innerHTML; <ide> <ide> parser(originalText, function(newContent) { <ide> el.innerHTML = newContent;
Java
bsd-3-clause
475669f8e088c89c26a93e2cb448850ad2ec8e04
0
jpetazzo/s3auth,jpetazzo/s3auth,jpetazzo/s3auth
/** * Copyright (c) 2012-2014, s3auth.com * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: 1) Redistributions of source code must retain the above * copyright notice, this list of conditions and the following * disclaimer. 2) Redistributions in binary form must reproduce the above * copyright notice, this list of conditions and the following * disclaimer in the documentation and/or other materials provided * with the distribution. 3) Neither the name of the s3auth.com nor * the names of its contributors may be used to endorse or promote * products derived from this software without specific prior written * permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT * NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL * THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED * OF THE POSSIBILITY OF SUCH DAMAGE. */ package com.s3auth.hosts; import com.amazonaws.services.s3.AmazonS3; import com.amazonaws.services.s3.model.GetObjectRequest; import com.amazonaws.services.s3.model.ObjectMetadata; import com.amazonaws.services.s3.model.S3Object; import com.amazonaws.services.s3.model.S3ObjectInputStream; import com.jcabi.aspects.Tv; import java.io.ByteArrayInputStream; import java.io.IOException; import java.security.SecureRandom; import java.util.Collection; import java.util.Date; import java.util.Random; import org.apache.http.client.methods.HttpGet; import org.hamcrest.MatcherAssert; import org.hamcrest.Matchers; import org.junit.Test; import org.mockito.Mockito; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; /** * Test case for {@link DefaultResource}. * @author Yegor Bugayenko ([email protected]) * @version $Id$ * @checkstyle ClassDataAbstractionCoupling (500 lines) */ public final class DefaultResourceTest { /** * DefaultResource can build headers. * @throws Exception If there is some problem inside */ @Test public void getsHeadersFromAmazonObject() throws Exception { final AmazonS3 client = Mockito.mock(AmazonS3.class); final S3Object object = Mockito.mock(S3Object.class); Mockito.doReturn(object).when(client) .getObject(Mockito.any(GetObjectRequest.class)); final ObjectMetadata meta = Mockito.mock(ObjectMetadata.class); Mockito.doReturn(meta).when(object).getObjectMetadata(); Mockito.doReturn(1L).when(meta).getContentLength(); final Resource res = new DefaultResource( client, "a", "", Range.ENTIRE, Version.LATEST, Mockito.mock(DomainStatsData.class) ); MatcherAssert.assertThat( res.headers(), Matchers.hasItem("Content-Length: 1") ); } /** * DefaultResource can write to output stream. 
* @throws Exception If there is some problem inside */ @Test public void writesFromAmazonObjectToOutputStream() throws Exception { final AmazonS3 client = Mockito.mock(AmazonS3.class); final S3Object object = Mockito.mock(S3Object.class); Mockito.doReturn(object).when(client) .getObject(Mockito.any(GetObjectRequest.class)); final S3ObjectInputStream stream = Mockito.mock(S3ObjectInputStream.class); Mockito.doReturn(-1).when(stream).read(Mockito.any(byte[].class)); Mockito.doReturn(stream).when(object).getObjectContent(); MatcherAssert.assertThat( ResourceMocker.toString( new DefaultResource( client, "b", "", Range.ENTIRE, Version.LATEST, Mockito.mock(DomainStatsData.class) ) ), Matchers.equalTo("") ); } /** * DefaultResource can write a real input stream to output stream. * @throws Exception If there is some problem inside */ @Test public void writesInputToOutputStream() throws Exception { final int size = 100 * 1024; final byte[] data = new byte[size]; final Random random = new SecureRandom(); for (int pos = 0; pos < size; ++pos) { data[pos] = (byte) random.nextInt(); } final S3ObjectInputStream stream = new S3ObjectInputStream( new ByteArrayInputStream(data), new HttpGet() ); final AmazonS3 client = Mockito.mock(AmazonS3.class); final S3Object object = Mockito.mock(S3Object.class); Mockito.doReturn(object).when(client) .getObject(Mockito.any(GetObjectRequest.class)); Mockito.doReturn(stream).when(object).getObjectContent(); MatcherAssert.assertThat( ResourceMocker.toByteArray( new DefaultResource( client, "c", "", Range.ENTIRE, Version.LATEST, Mockito.mock(DomainStatsData.class) ) ), Matchers.equalTo(data) ); } /** * DefaultResource can throw when failed to read. * @throws Exception If there is some problem inside */ @Test(expected = IOException.class) public void throwsWhenFailedToRead() throws Exception { final S3ObjectInputStream stream = Mockito.mock(S3ObjectInputStream.class); Mockito.doThrow(new IOException("oops")) .when(stream).read(Mockito.any(byte[].class)); final AmazonS3 client = Mockito.mock(AmazonS3.class); final S3Object object = Mockito.mock(S3Object.class); Mockito.doReturn(object).when(client) .getObject(Mockito.any(GetObjectRequest.class)); Mockito.doReturn(stream).when(object).getObjectContent(); MatcherAssert.assertThat( ResourceMocker.toString( new DefaultResource( client, "d", "", Range.ENTIRE, Version.LATEST, Mockito.mock(DomainStatsData.class) ) ), Matchers.equalTo("") ); } /** * DefaultResource can obtain its last modified date. * @throws Exception If there is some problem inside */ @Test public void getsLastModifiedDate() throws Exception { final Date date = new Date(); final AmazonS3 client = Mockito.mock(AmazonS3.class); final S3Object object = Mockito.mock(S3Object.class); Mockito.doReturn(object).when(client) .getObject(Mockito.any(GetObjectRequest.class)); final ObjectMetadata meta = Mockito.mock(ObjectMetadata.class); Mockito.doReturn(meta).when(object).getObjectMetadata(); Mockito.doReturn(date).when(meta).getLastModified(); final Resource res = new DefaultResource( client, "x", "", Range.ENTIRE, Version.LATEST, Mockito.mock(DomainStatsData.class) ); MatcherAssert.assertThat( res.lastModified(), Matchers.is(date) ); } /** * DefaultResource can get Cache-Control info. 
* @throws Exception If there is some problem inside */ @Test public void getsCacheControlHeaderFromAmazonObject() throws Exception { final AmazonS3 client = Mockito.mock(AmazonS3.class); final S3Object object = Mockito.mock(S3Object.class); Mockito.doReturn(object).when(client) .getObject(Mockito.any(GetObjectRequest.class)); final ObjectMetadata meta = Mockito.mock(ObjectMetadata.class); Mockito.doReturn(meta).when(object).getObjectMetadata(); Mockito.doReturn("max-age: 600, public").when(meta).getCacheControl(); final Resource res = new DefaultResource( client, "e", "", Range.ENTIRE, Version.LATEST, Mockito.mock(DomainStatsData.class) ); MatcherAssert.assertThat( res.headers(), Matchers.hasItem("Cache-Control: max-age: 600, public") ); } /** * DefaultResource can get default Cache-Control info if resource metadata * does not specify it. * @throws Exception If there is some problem inside */ @Test public void getsDefaultCacheControlHeader() throws Exception { final AmazonS3 client = Mockito.mock(AmazonS3.class); final S3Object object = Mockito.mock(S3Object.class); Mockito.doReturn(object).when(client) .getObject(Mockito.any(GetObjectRequest.class)); final ObjectMetadata meta = Mockito.mock(ObjectMetadata.class); Mockito.doReturn(meta).when(object).getObjectMetadata(); Mockito.doReturn(null).when(meta).getCacheControl(); final Resource res = new DefaultResource( client, "f", "", Range.ENTIRE, Version.LATEST, Mockito.mock(DomainStatsData.class) ); MatcherAssert.assertThat( res.headers(), Matchers.hasItem("Cache-Control: must-revalidate") ); } /** * DefaultResource can post metrics. * @throws Exception If there is some problem inside */ @Test public void postsMetricData() throws Exception { final int size = 100; final byte[] data = new byte[size]; final Random random = new Random(); for (int pos = 0; pos < size; ++pos) { data[pos] = (byte) random.nextInt(); } final S3ObjectInputStream stream = new S3ObjectInputStream( new ByteArrayInputStream(data), new HttpGet() ); final AmazonS3 client = Mockito.mock(AmazonS3.class); final S3Object object = Mockito.mock(S3Object.class); Mockito.doReturn(object).when(client) .getObject(Mockito.any(GetObjectRequest.class)); Mockito.doReturn(stream).when(object).getObjectContent(); final DomainStatsData stats = Mockito.mock(DomainStatsData.class); final String bucket = "MetricsTest"; MatcherAssert.assertThat( ResourceMocker.toByteArray( new DefaultResource( client, bucket, "", Range.ENTIRE, Version.LATEST, stats ) ), Matchers.equalTo(data) ); Mockito.verify(stats, Mockito.only()).put( bucket, new Stats.Simple(data.length) ); } /** * DefaultResource can specify an object version to retrieve. * @throws Exception If there is some problem inside */ @Test public void specifiesObjectVersion() throws Exception { final AmazonS3 client = Mockito.mock(AmazonS3.class); final String version = "abcd"; Mockito.doAnswer( new Answer<S3Object>() { @Override public S3Object answer(final InvocationOnMock invocation) { final GetObjectRequest req = (GetObjectRequest) invocation.getArguments()[0]; MatcherAssert.assertThat( req.getVersionId(), Matchers.is(version) ); return Mockito.mock(S3Object.class); } } ).when(client).getObject(Mockito.any(GetObjectRequest.class)); new DefaultResource( client, "h", "", Range.ENTIRE, new Version.Simple(version), Mockito.mock(DomainStatsData.class) ); } /** * DefaultResource can close the underlying S3Object. 
* @throws Exception If there is some problem inside */ @Test public void closesUnderlyingObject() throws Exception { final AmazonS3 client = Mockito.mock(AmazonS3.class); final S3Object object = Mockito.mock(S3Object.class); Mockito.doReturn(object).when(client) .getObject(Mockito.any(GetObjectRequest.class)); final ObjectMetadata meta = Mockito.mock(ObjectMetadata.class); Mockito.doReturn(meta).when(object).getObjectMetadata(); Mockito.doReturn(1L).when(meta).getContentLength(); new DefaultResource( client, "i", "", Range.ENTIRE, Version.LATEST, Mockito.mock(DomainStatsData.class) ).close(); Mockito.verify(object, Mockito.times(1)).close(); } /** * DefaultResource closes the underlying object when obtaining the full * object size from the Content-Range header. * @throws Exception If there is some problem inside */ @Test public void closesUnderlyingObjectWhenSizeIsInvoked() throws Exception { final AmazonS3 client = Mockito.mock(AmazonS3.class); final S3Object object = Mockito.mock(S3Object.class); Mockito.doReturn(object).when(client) .getObject(Mockito.any(GetObjectRequest.class)); final ObjectMetadata meta = Mockito.mock(ObjectMetadata.class); Mockito.doReturn(meta).when(object).getObjectMetadata(); Mockito.doReturn((long) Tv.TEN).when(meta).getContentLength(); final Collection<String> headers = new DefaultResource( client, "j", "", new Range.Simple(0, 1), Version.LATEST, Mockito.mock(DomainStatsData.class) ).headers(); MatcherAssert.assertThat( headers, Matchers.hasItem( Matchers.containsString("Content-Range: bytes 0-1/10") ) ); Mockito.verify(object, Mockito.times(1)).close(); } /** * DefaultResource can get Content-Encoding info. * @throws Exception If there is some problem inside */ @Test public void getsContentEncodingHeaderFromAmazonObject() throws Exception { final AmazonS3 client = Mockito.mock(AmazonS3.class); final S3Object object = Mockito.mock(S3Object.class); Mockito.doReturn(object).when(client) .getObject(Mockito.any(GetObjectRequest.class)); final ObjectMetadata meta = Mockito.mock(ObjectMetadata.class); Mockito.doReturn(meta).when(object).getObjectMetadata(); Mockito.doReturn("gzip").when(meta).getContentEncoding(); final Resource res = new DefaultResource( client, "abcdef", "", Range.ENTIRE, Version.LATEST, Mockito.mock(DomainStatsData.class) ); MatcherAssert.assertThat( res.headers(), Matchers.hasItem("Content-Encoding: gzip") ); } }
s3auth-hosts/src/test/java/com/s3auth/hosts/DefaultResourceTest.java
/** * Copyright (c) 2012-2014, s3auth.com * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: 1) Redistributions of source code must retain the above * copyright notice, this list of conditions and the following * disclaimer. 2) Redistributions in binary form must reproduce the above * copyright notice, this list of conditions and the following * disclaimer in the documentation and/or other materials provided * with the distribution. 3) Neither the name of the s3auth.com nor * the names of its contributors may be used to endorse or promote * products derived from this software without specific prior written * permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT * NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL * THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED * OF THE POSSIBILITY OF SUCH DAMAGE. */ package com.s3auth.hosts; import com.amazonaws.services.s3.AmazonS3; import com.amazonaws.services.s3.model.GetObjectRequest; import com.amazonaws.services.s3.model.ObjectMetadata; import com.amazonaws.services.s3.model.S3Object; import com.amazonaws.services.s3.model.S3ObjectInputStream; import com.jcabi.aspects.Tv; import java.io.ByteArrayInputStream; import java.io.IOException; import java.util.Collection; import java.util.Date; import java.util.Random; import org.apache.http.client.methods.HttpGet; import org.hamcrest.MatcherAssert; import org.hamcrest.Matchers; import org.junit.Test; import org.mockito.Mockito; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; /** * Test case for {@link DefaultResource}. * @author Yegor Bugayenko ([email protected]) * @version $Id$ * @checkstyle ClassDataAbstractionCoupling (500 lines) */ public final class DefaultResourceTest { /** * DefaultResource can build headers. * @throws Exception If there is some problem inside */ @Test public void getsHeadersFromAmazonObject() throws Exception { final AmazonS3 client = Mockito.mock(AmazonS3.class); final S3Object object = Mockito.mock(S3Object.class); Mockito.doReturn(object).when(client) .getObject(Mockito.any(GetObjectRequest.class)); final ObjectMetadata meta = Mockito.mock(ObjectMetadata.class); Mockito.doReturn(meta).when(object).getObjectMetadata(); Mockito.doReturn(1L).when(meta).getContentLength(); final Resource res = new DefaultResource( client, "a", "", Range.ENTIRE, Version.LATEST, Mockito.mock(DomainStatsData.class) ); MatcherAssert.assertThat( res.headers(), Matchers.hasItem("Content-Length: 1") ); } /** * DefaultResource can write to output stream. 
* @throws Exception If there is some problem inside */ @Test public void writesFromAmazonObjectToOutputStream() throws Exception { final AmazonS3 client = Mockito.mock(AmazonS3.class); final S3Object object = Mockito.mock(S3Object.class); Mockito.doReturn(object).when(client) .getObject(Mockito.any(GetObjectRequest.class)); final S3ObjectInputStream stream = Mockito.mock(S3ObjectInputStream.class); Mockito.doReturn(-1).when(stream).read(Mockito.any(byte[].class)); Mockito.doReturn(stream).when(object).getObjectContent(); MatcherAssert.assertThat( ResourceMocker.toString( new DefaultResource( client, "b", "", Range.ENTIRE, Version.LATEST, Mockito.mock(DomainStatsData.class) ) ), Matchers.equalTo("") ); } /** * DefaultResource can write a real input stream to output stream. * @throws Exception If there is some problem inside */ @Test public void writesInputToOutputStream() throws Exception { final int size = 100 * 1024; final byte[] data = new byte[size]; final Random random = new Random(); for (int pos = 0; pos < size; ++pos) { data[pos] = (byte) random.nextInt(); } final S3ObjectInputStream stream = new S3ObjectInputStream( new ByteArrayInputStream(data), new HttpGet() ); final AmazonS3 client = Mockito.mock(AmazonS3.class); final S3Object object = Mockito.mock(S3Object.class); Mockito.doReturn(object).when(client) .getObject(Mockito.any(GetObjectRequest.class)); Mockito.doReturn(stream).when(object).getObjectContent(); MatcherAssert.assertThat( ResourceMocker.toByteArray( new DefaultResource( client, "c", "", Range.ENTIRE, Version.LATEST, Mockito.mock(DomainStatsData.class) ) ), Matchers.equalTo(data) ); } /** * DefaultResource can throw when failed to read. * @throws Exception If there is some problem inside */ @Test(expected = IOException.class) public void throwsWhenFailedToRead() throws Exception { final S3ObjectInputStream stream = Mockito.mock(S3ObjectInputStream.class); Mockito.doThrow(new IOException("oops")) .when(stream).read(Mockito.any(byte[].class)); final AmazonS3 client = Mockito.mock(AmazonS3.class); final S3Object object = Mockito.mock(S3Object.class); Mockito.doReturn(object).when(client) .getObject(Mockito.any(GetObjectRequest.class)); Mockito.doReturn(stream).when(object).getObjectContent(); MatcherAssert.assertThat( ResourceMocker.toString( new DefaultResource( client, "d", "", Range.ENTIRE, Version.LATEST, Mockito.mock(DomainStatsData.class) ) ), Matchers.equalTo("") ); } /** * DefaultResource can obtain its last modified date. * @throws Exception If there is some problem inside */ @Test public void getsLastModifiedDate() throws Exception { final Date date = new Date(); final AmazonS3 client = Mockito.mock(AmazonS3.class); final S3Object object = Mockito.mock(S3Object.class); Mockito.doReturn(object).when(client) .getObject(Mockito.any(GetObjectRequest.class)); final ObjectMetadata meta = Mockito.mock(ObjectMetadata.class); Mockito.doReturn(meta).when(object).getObjectMetadata(); Mockito.doReturn(date).when(meta).getLastModified(); final Resource res = new DefaultResource( client, "x", "", Range.ENTIRE, Version.LATEST, Mockito.mock(DomainStatsData.class) ); MatcherAssert.assertThat( res.lastModified(), Matchers.is(date) ); } /** * DefaultResource can get Cache-Control info. 
* @throws Exception If there is some problem inside */ @Test public void getsCacheControlHeaderFromAmazonObject() throws Exception { final AmazonS3 client = Mockito.mock(AmazonS3.class); final S3Object object = Mockito.mock(S3Object.class); Mockito.doReturn(object).when(client) .getObject(Mockito.any(GetObjectRequest.class)); final ObjectMetadata meta = Mockito.mock(ObjectMetadata.class); Mockito.doReturn(meta).when(object).getObjectMetadata(); Mockito.doReturn("max-age: 600, public").when(meta).getCacheControl(); final Resource res = new DefaultResource( client, "e", "", Range.ENTIRE, Version.LATEST, Mockito.mock(DomainStatsData.class) ); MatcherAssert.assertThat( res.headers(), Matchers.hasItem("Cache-Control: max-age: 600, public") ); } /** * DefaultResource can get default Cache-Control info if resource metadata * does not specify it. * @throws Exception If there is some problem inside */ @Test public void getsDefaultCacheControlHeader() throws Exception { final AmazonS3 client = Mockito.mock(AmazonS3.class); final S3Object object = Mockito.mock(S3Object.class); Mockito.doReturn(object).when(client) .getObject(Mockito.any(GetObjectRequest.class)); final ObjectMetadata meta = Mockito.mock(ObjectMetadata.class); Mockito.doReturn(meta).when(object).getObjectMetadata(); Mockito.doReturn(null).when(meta).getCacheControl(); final Resource res = new DefaultResource( client, "f", "", Range.ENTIRE, Version.LATEST, Mockito.mock(DomainStatsData.class) ); MatcherAssert.assertThat( res.headers(), Matchers.hasItem("Cache-Control: must-revalidate") ); } /** * DefaultResource can post metrics. * @throws Exception If there is some problem inside */ @Test public void postsMetricData() throws Exception { final int size = 100; final byte[] data = new byte[size]; final Random random = new Random(); for (int pos = 0; pos < size; ++pos) { data[pos] = (byte) random.nextInt(); } final S3ObjectInputStream stream = new S3ObjectInputStream( new ByteArrayInputStream(data), new HttpGet() ); final AmazonS3 client = Mockito.mock(AmazonS3.class); final S3Object object = Mockito.mock(S3Object.class); Mockito.doReturn(object).when(client) .getObject(Mockito.any(GetObjectRequest.class)); Mockito.doReturn(stream).when(object).getObjectContent(); final DomainStatsData stats = Mockito.mock(DomainStatsData.class); final String bucket = "MetricsTest"; MatcherAssert.assertThat( ResourceMocker.toByteArray( new DefaultResource( client, bucket, "", Range.ENTIRE, Version.LATEST, stats ) ), Matchers.equalTo(data) ); Mockito.verify(stats, Mockito.only()).put( bucket, new Stats.Simple(data.length) ); } /** * DefaultResource can specify an object version to retrieve. * @throws Exception If there is some problem inside */ @Test public void specifiesObjectVersion() throws Exception { final AmazonS3 client = Mockito.mock(AmazonS3.class); final String version = "abcd"; Mockito.doAnswer( new Answer<S3Object>() { @Override public S3Object answer(final InvocationOnMock invocation) { final GetObjectRequest req = (GetObjectRequest) invocation.getArguments()[0]; MatcherAssert.assertThat( req.getVersionId(), Matchers.is(version) ); return Mockito.mock(S3Object.class); } } ).when(client).getObject(Mockito.any(GetObjectRequest.class)); new DefaultResource( client, "h", "", Range.ENTIRE, new Version.Simple(version), Mockito.mock(DomainStatsData.class) ); } /** * DefaultResource can close the underlying S3Object. 
* @throws Exception If there is some problem inside */ @Test public void closesUnderlyingObject() throws Exception { final AmazonS3 client = Mockito.mock(AmazonS3.class); final S3Object object = Mockito.mock(S3Object.class); Mockito.doReturn(object).when(client) .getObject(Mockito.any(GetObjectRequest.class)); final ObjectMetadata meta = Mockito.mock(ObjectMetadata.class); Mockito.doReturn(meta).when(object).getObjectMetadata(); Mockito.doReturn(1L).when(meta).getContentLength(); new DefaultResource( client, "i", "", Range.ENTIRE, Version.LATEST, Mockito.mock(DomainStatsData.class) ).close(); Mockito.verify(object, Mockito.times(1)).close(); } /** * DefaultResource closes the underlying object when obtaining the full * object size from the Content-Range header. * @throws Exception If there is some problem inside */ @Test public void closesUnderlyingObjectWhenSizeIsInvoked() throws Exception { final AmazonS3 client = Mockito.mock(AmazonS3.class); final S3Object object = Mockito.mock(S3Object.class); Mockito.doReturn(object).when(client) .getObject(Mockito.any(GetObjectRequest.class)); final ObjectMetadata meta = Mockito.mock(ObjectMetadata.class); Mockito.doReturn(meta).when(object).getObjectMetadata(); Mockito.doReturn((long) Tv.TEN).when(meta).getContentLength(); final Collection<String> headers = new DefaultResource( client, "j", "", new Range.Simple(0, 1), Version.LATEST, Mockito.mock(DomainStatsData.class) ).headers(); MatcherAssert.assertThat( headers, Matchers.hasItem( Matchers.containsString("Content-Range: bytes 0-1/10") ) ); Mockito.verify(object, Mockito.times(1)).close(); } }
#207 problem reproduced
s3auth-hosts/src/test/java/com/s3auth/hosts/DefaultResourceTest.java
#207 problem reproduced
<ide><path>3auth-hosts/src/test/java/com/s3auth/hosts/DefaultResourceTest.java <ide> import com.jcabi.aspects.Tv; <ide> import java.io.ByteArrayInputStream; <ide> import java.io.IOException; <add>import java.security.SecureRandom; <ide> import java.util.Collection; <ide> import java.util.Date; <ide> import java.util.Random; <ide> public void writesInputToOutputStream() throws Exception { <ide> final int size = 100 * 1024; <ide> final byte[] data = new byte[size]; <del> final Random random = new Random(); <add> final Random random = new SecureRandom(); <ide> for (int pos = 0; pos < size; ++pos) { <ide> data[pos] = (byte) random.nextInt(); <ide> } <ide> Mockito.verify(object, Mockito.times(1)).close(); <ide> } <ide> <add> /** <add> * DefaultResource can get Content-Encoding info. <add> * @throws Exception If there is some problem inside <add> */ <add> @Test <add> public void getsContentEncodingHeaderFromAmazonObject() throws Exception { <add> final AmazonS3 client = Mockito.mock(AmazonS3.class); <add> final S3Object object = Mockito.mock(S3Object.class); <add> Mockito.doReturn(object).when(client) <add> .getObject(Mockito.any(GetObjectRequest.class)); <add> final ObjectMetadata meta = Mockito.mock(ObjectMetadata.class); <add> Mockito.doReturn(meta).when(object).getObjectMetadata(); <add> Mockito.doReturn("gzip").when(meta).getContentEncoding(); <add> final Resource res = new DefaultResource( <add> client, "abcdef", "", Range.ENTIRE, Version.LATEST, <add> Mockito.mock(DomainStatsData.class) <add> ); <add> MatcherAssert.assertThat( <add> res.headers(), <add> Matchers.hasItem("Content-Encoding: gzip") <add> ); <add> } <add> <ide> }
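Besides the added Content-Encoding test, the only change to existing test logic in this record's diff is swapping java.util.Random for SecureRandom when generating the test payload. A small sketch of why that is a drop-in change (SecureRandom is a subclass of java.util.Random, so the byte-filling loop stays identical); the class name RandomPayloadDemo is hypothetical:

```java
import java.security.SecureRandom;
import java.util.Random;

public class RandomPayloadDemo {
    public static void main(String[] args) {
        // SecureRandom extends java.util.Random, so it can be assigned to the
        // same variable and driven by the same nextInt() loop as before.
        Random random = new SecureRandom();
        byte[] data = new byte[100];
        for (int pos = 0; pos < data.length; ++pos) {
            data[pos] = (byte) random.nextInt();
        }
        // random.nextBytes(data) would fill the array in a single call as well.
        System.out.println("first byte: " + data[0]);
    }
}
```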
Java
apache-2.0
7636cf3d6b80944f89786328b9c21485834d0011
0
phax/ph-commons
/** * Copyright (C) 2014-2019 Philip Helger (www.helger.com) * philip[at]helger[dot]com * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.helger.commons.codec; import java.io.IOException; import java.io.Writer; import java.util.BitSet; import javax.annotation.Nonnegative; import javax.annotation.Nonnull; import javax.annotation.Nullable; import javax.annotation.WillNotClose; import com.helger.commons.string.StringHelper; import com.helger.commons.text.util.ABNF; /** * Codec for RFC 2616 HTTP header values. * * @author Philip Helger * @since 9.3.6 */ public class RFC2616Codec implements ICharArrayCodec { private static final char QUOTE_CHAR = '"'; private static final char ESCAPE_CHAR = '\\'; // Non-token chars according to RFC 2616 private static final BitSet NON_TOKEN_RFC2616 = new BitSet (256); static { for (int i = ABNF.CHECK_RANGE_MIN_INCL; i <= ABNF.CHECK_RANGE_MAX_INCL; ++i) if (ABNF.isCtl (i) || ABNF.isSP (i) || ABNF.isHTab (i)) NON_TOKEN_RFC2616.set (i); NON_TOKEN_RFC2616.set ('('); NON_TOKEN_RFC2616.set (')'); NON_TOKEN_RFC2616.set ('<'); NON_TOKEN_RFC2616.set ('>'); NON_TOKEN_RFC2616.set ('@'); NON_TOKEN_RFC2616.set (','); NON_TOKEN_RFC2616.set (';'); NON_TOKEN_RFC2616.set (':'); NON_TOKEN_RFC2616.set ('\\'); NON_TOKEN_RFC2616.set ('"'); NON_TOKEN_RFC2616.set ('/'); NON_TOKEN_RFC2616.set ('['); NON_TOKEN_RFC2616.set (']'); NON_TOKEN_RFC2616.set ('?'); NON_TOKEN_RFC2616.set ('='); NON_TOKEN_RFC2616.set ('{'); NON_TOKEN_RFC2616.set ('}'); } public static boolean isToken (@Nullable final String s) { // May not be empty if (s == null) return false; return isToken (s.toCharArray ()); } public static boolean isToken (@Nullable final char [] aChars) { // May not be empty if (aChars == null || aChars.length == 0) return false; // No forbidden chars may be present for (final char c : aChars) if (NON_TOKEN_RFC2616.get (c)) return false; return true; } public static boolean isMaybeEncoded (@Nullable final String s) { return s != null && s.length () >= 2 && s.charAt (0) == QUOTE_CHAR && StringHelper.getLastChar (s) == QUOTE_CHAR; } public RFC2616Codec () {} /** * Get the maximum encoded length based on the provided decoded length. This * is purely for performance reasons. The name of the method would be better * called "getMaximumEncodedLength". * * @param nDecodedLen * The decoded length. Always &ge; 0. * @return The maximum encoded length. Always &ge; 0. 
* @deprecated Use {@link #getMaximumEncodedLength(int)} */ @Nonnegative @Deprecated public int getEncodedLength (@Nonnegative final int nDecodedLen) { return getMaximumEncodedLength (nDecodedLen); } @Nonnegative public int getMaximumEncodedLength (@Nonnegative final int nDecodedLen) { // Worst case: each char needs quoting return 1 + 2 * nDecodedLen + 1; } public void encode (@Nullable final char [] aDecodedBuffer, @Nonnegative final int nOfs, @Nonnegative final int nLen, @Nonnull @WillNotClose final Writer aWriter) { // Length 0 is okay, because it results in an empty string if (aDecodedBuffer == null) return; try { // Opening quote aWriter.write (QUOTE_CHAR); for (int i = 0; i < nLen; ++i) { final char b = aDecodedBuffer[nOfs + i]; if (b == ESCAPE_CHAR || b == QUOTE_CHAR) aWriter.write (ESCAPE_CHAR); aWriter.write (b); } // closing quote aWriter.write (QUOTE_CHAR); } catch (final IOException ex) { throw new EncodeException ("Failed to encode RFC2616", ex); } } @Nonnegative public int getMaximumDecodedLength (@Nonnegative final int nEncodedLen) { if (nEncodedLen < 2) return 0; // Without leading and trailing quote return nEncodedLen - 2; } public void decode (@Nullable final char [] aEncodedBuffer, @Nonnegative final int nOfs, @Nonnegative final int nLen, @Nonnull @WillNotClose final Writer aWriter) { if (aEncodedBuffer == null) return; if (nLen < 2) throw new DecodeException ("At least the 2 quote characters must be present. Provided length is only " + nLen); if (aEncodedBuffer[nOfs] != QUOTE_CHAR) throw new DecodeException ("The provided bytes does not seem to be encoded. The first byte is not the double quote character."); final int nLastOfs = nOfs + nLen - 1; if (aEncodedBuffer[nLastOfs] != QUOTE_CHAR) throw new DecodeException ("The provided bytes does not seem to be encoded. The last byte is not the double quote character."); try { for (int i = nOfs + 1; i < nLastOfs; ++i) { final char c = aEncodedBuffer[i]; if (c == ESCAPE_CHAR) { if (i == nLastOfs - 1) throw new DecodeException ("The encoded string seems to be cut. The second last character cannot be an escape character."); ++i; aWriter.write (aEncodedBuffer[i]); } else aWriter.write (c); } } catch (final IOException ex) { throw new DecodeException ("Failed to decode RFC2616", ex); } } }
ph-commons/src/main/java/com/helger/commons/codec/RFC2616Codec.java
/** * Copyright (C) 2014-2019 Philip Helger (www.helger.com) * philip[at]helger[dot]com * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.helger.commons.codec; import java.io.IOException; import java.io.Writer; import java.util.BitSet; import javax.annotation.Nonnegative; import javax.annotation.Nonnull; import javax.annotation.Nullable; import javax.annotation.WillNotClose; import com.helger.commons.string.StringHelper; import com.helger.commons.text.util.ABNF; /** * Codec for RFC 2616 HTTP header values. * * @author Philip Helger * @since 9.3.6 */ public class RFC2616Codec implements ICharArrayCodec { private static final char QUOTE_CHAR = '"'; private static final char ESCAPE_CHAR = '\\'; // Non-token chars according to RFC 2616 private static final BitSet NON_TOKEN_RFC2616 = new BitSet (256); static { for (int i = ABNF.CHECK_RANGE_MIN_INCL; i <= ABNF.CHECK_RANGE_MAX_INCL; ++i) if (ABNF.isCtl (i) || ABNF.isSP (i) || ABNF.isHTab (i)) NON_TOKEN_RFC2616.set (i); NON_TOKEN_RFC2616.set ('('); NON_TOKEN_RFC2616.set (')'); NON_TOKEN_RFC2616.set ('<'); NON_TOKEN_RFC2616.set ('>'); NON_TOKEN_RFC2616.set ('@'); NON_TOKEN_RFC2616.set (','); NON_TOKEN_RFC2616.set (';'); NON_TOKEN_RFC2616.set (':'); NON_TOKEN_RFC2616.set ('\\'); NON_TOKEN_RFC2616.set ('"'); NON_TOKEN_RFC2616.set ('/'); NON_TOKEN_RFC2616.set ('['); NON_TOKEN_RFC2616.set (']'); NON_TOKEN_RFC2616.set ('?'); NON_TOKEN_RFC2616.set ('='); NON_TOKEN_RFC2616.set ('{'); NON_TOKEN_RFC2616.set ('}'); } public static boolean isToken (@Nullable final String s) { // May not be empty if (s == null) return false; return isToken (s.toCharArray ()); } public static boolean isToken (@Nullable final char [] aChars) { // May not be empty if (aChars == null || aChars.length == 0) return false; // No forbidden chars may be present for (final char c : aChars) if (NON_TOKEN_RFC2616.get (c)) return false; return true; } public static boolean isMaybeEncoded (@Nullable final String s) { return s != null && s.length () >= 2 && s.charAt (0) == QUOTE_CHAR && StringHelper.getLastChar (s) == QUOTE_CHAR; } public RFC2616Codec () {} /** * Get the maximum encoded length based on the provided decoded length. This * is purely for performance reasons. The name of the method would be better * called "getMaximumEncodedLength". * * @param nDecodedLen * The decoded length. Always &ge; 0. * @return The maximum encoded length. Always &ge; 0. 
* @deprecated Use {@link #getMaximumEncodedLength(int)} */ @Nonnegative @Deprecated public int getEncodedLength (@Nonnegative final int nDecodedLen) { return getMaximumEncodedLength (nDecodedLen); } @Nonnegative public int getMaximumEncodedLength (@Nonnegative final int nDecodedLen) { // Worst case: each char needs quoting return 1 + 2 * nDecodedLen + 1; } public void encode (@Nullable final char [] aDecodedBuffer, @Nonnegative final int nOfs, @Nonnegative final int nLen, @Nonnull @WillNotClose final Writer aWriter) { // Length 0 is okay, because it results in an empty string if (aDecodedBuffer == null) return; try { // Opening quote aWriter.write (QUOTE_CHAR); for (int i = 0; i < nLen; ++i) { final char b = aDecodedBuffer[nOfs + i]; if (b == ESCAPE_CHAR || b == QUOTE_CHAR) aWriter.write (ESCAPE_CHAR); aWriter.write (b); } // closing quote aWriter.write (QUOTE_CHAR); } catch (final IOException ex) { throw new EncodeException ("Failed to encode RFC2616", ex); } } @Nonnegative public int getMaximumDecodedLength (@Nonnegative final int nEncodedLen) { // Without leading and trailing quote return nEncodedLen - 2; } public void decode (@Nullable final char [] aEncodedBuffer, @Nonnegative final int nOfs, @Nonnegative final int nLen, @Nonnull @WillNotClose final Writer aWriter) { if (aEncodedBuffer == null) return; if (nLen < 2) throw new DecodeException ("At least the 2 quote characters must be present. Provided length is only " + nLen); if (aEncodedBuffer[nOfs] != QUOTE_CHAR) throw new DecodeException ("The provided bytes does not seem to be encoded. The first byte is not the double quote character."); final int nLastOfs = nOfs + nLen - 1; if (aEncodedBuffer[nLastOfs] != QUOTE_CHAR) throw new DecodeException ("The provided bytes does not seem to be encoded. The last byte is not the double quote character."); try { for (int i = nOfs + 1; i < nLastOfs; ++i) { final char c = aEncodedBuffer[i]; if (c == ESCAPE_CHAR) { if (i == nLastOfs - 1) throw new DecodeException ("The encoded string seems to be cut. The second last character cannot be an escape character."); ++i; aWriter.write (aEncodedBuffer[i]); } else aWriter.write (c); } } catch (final IOException ex) { throw new DecodeException ("Failed to decode RFC2616", ex); } } }
Test fix
ph-commons/src/main/java/com/helger/commons/codec/RFC2616Codec.java
Test fix
<ide><path>h-commons/src/main/java/com/helger/commons/codec/RFC2616Codec.java <ide> @Nonnegative <ide> public int getMaximumDecodedLength (@Nonnegative final int nEncodedLen) <ide> { <add> if (nEncodedLen < 2) <add> return 0; <ide> // Without leading and trailing quote <ide> return nEncodedLen - 2; <ide> }
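The RFC2616Codec record above encodes an HTTP header value by wrapping it in double quotes and escaping '"' and '\', and decodes by reversing that; the fix only guards getMaximumDecodedLength against inputs shorter than two characters. A minimal round-trip sketch against the Writer-based encode/decode API shown in the record (the demo class name is illustrative):

```java
import java.io.StringWriter;
import com.helger.commons.codec.RFC2616Codec;

public class RFC2616CodecDemo {
    public static void main(String[] args) {
        RFC2616Codec codec = new RFC2616Codec();
        char[] value = "a \"quoted\" value with a \\ backslash".toCharArray();

        // encode(...) surrounds the value with double quotes and escapes '"' and '\'.
        StringWriter encoded = new StringWriter();
        codec.encode(value, 0, value.length, encoded);
        System.out.println(encoded); // prints: "a \"quoted\" value with a \\ backslash"

        // decode(...) requires the leading/trailing quotes and unescapes the content.
        char[] enc = encoded.toString().toCharArray();
        StringWriter decoded = new StringWriter();
        codec.decode(enc, 0, enc.length, decoded);
        System.out.println(decoded.toString().equals(new String(value))); // true
    }
}
```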
JavaScript
mit
8dc0bd8327a0acef05a611fa57a7b256951db69f
0
timdp/atom-npm-helper
'use babel' import getPath from 'consistent-path' import {BufferedProcess} from 'atom' export default (command, args, cwd = null) => new Promise((resolve, reject) => { let output = null const stdout = (_output) => output = _output const exit = (code) => { if (code !== 0) { reject(new Error('Exit status ' + code)) } else { resolve(output) } } const env = Object.assign({}, {path: getPath()}, process.env) const options = {cwd, env} /* eslint no-new: 0 */ new BufferedProcess({command, args, options, stdout, exit}) })
lib/util/spawn-promise.js
'use babel' import {getPath} from 'consistent-path' import {BufferedProcess} from 'atom' export default (command, args, cwd = null) => new Promise((resolve, reject) => { let output = null const stdout = (_output) => output = _output const exit = (code) => { if (code !== 0) { reject(new Error('Exit status ' + code)) } else { resolve(output) } } const env = Object.assign({}, {path: getPath()}, process.env) const options = {cwd, env} /* eslint no-new: 0 */ new BufferedProcess({command, args, options, stdout, exit}) })
Update consistent-path import for 2.x
lib/util/spawn-promise.js
Update consistent-path import for 2.x
<ide><path>ib/util/spawn-promise.js <ide> 'use babel' <ide> <del>import {getPath} from 'consistent-path' <add>import getPath from 'consistent-path' <ide> import {BufferedProcess} from 'atom' <ide> <ide> export default (command, args, cwd = null) => new Promise((resolve, reject) => {
Java
apache-2.0
3ae998bf5c55672ba45d2155d79ef256e4d8a561
0
adjohnson916/groovy-core,russel/groovy,EPadronU/incubator-groovy,PascalSchumacher/incubator-groovy,adjohnson916/groovy-core,nobeans/incubator-groovy,yukangguo/incubator-groovy,aaronzirbes/incubator-groovy,guangying945/incubator-groovy,samanalysis/incubator-groovy,antoaravinth/incubator-groovy,rabbitcount/incubator-groovy,paplorinc/incubator-groovy,adjohnson916/incubator-groovy,paulk-asert/incubator-groovy,aim-for-better/incubator-groovy,kidaa/incubator-groovy,samanalysis/incubator-groovy,bsideup/incubator-groovy,i55ac/incubator-groovy,apache/incubator-groovy,paulk-asert/groovy,adjohnson916/groovy-core,tkruse/incubator-groovy,jwagenleitner/incubator-groovy,kenzanmedia/incubator-groovy,mariogarcia/groovy-core,adjohnson916/incubator-groovy,pickypg/incubator-groovy,aim-for-better/incubator-groovy,christoph-frick/groovy-core,samanalysis/incubator-groovy,pledbrook/incubator-groovy,avafanasiev/groovy,aaronzirbes/incubator-groovy,alien11689/groovy-core,fpavageau/groovy,PascalSchumacher/incubator-groovy,tkruse/incubator-groovy,i55ac/incubator-groovy,EPadronU/incubator-groovy,rabbitcount/incubator-groovy,rlovtangen/groovy-core,shils/incubator-groovy,russel/groovy,bsideup/groovy-core,kidaa/incubator-groovy,paulk-asert/incubator-groovy,guangying945/incubator-groovy,antoaravinth/incubator-groovy,armsargis/groovy,dpolivaev/groovy,rabbitcount/incubator-groovy,ebourg/groovy-core,sagarsane/groovy-core,sagarsane/incubator-groovy,traneHead/groovy-core,bsideup/groovy-core,bsideup/incubator-groovy,adjohnson916/groovy-core,shils/incubator-groovy,adjohnson916/incubator-groovy,alien11689/groovy-core,russel/incubator-groovy,taoguan/incubator-groovy,aim-for-better/incubator-groovy,sagarsane/groovy-core,upadhyayap/incubator-groovy,alien11689/groovy-core,ebourg/incubator-groovy,alien11689/incubator-groovy,sagarsane/incubator-groovy,tkruse/incubator-groovy,alien11689/incubator-groovy,paulk-asert/incubator-groovy,russel/incubator-groovy,russel/groovy,ebourg/groovy-core,pickypg/incubator-groovy,EPadronU/incubator-groovy,gillius/incubator-groovy,ChanJLee/incubator-groovy,apache/incubator-groovy,pledbrook/incubator-groovy,mariogarcia/groovy-core,rlovtangen/groovy-core,ChanJLee/incubator-groovy,fpavageau/groovy,ChanJLee/incubator-groovy,traneHead/groovy-core,graemerocher/incubator-groovy,paulk-asert/groovy,apache/groovy,paplorinc/incubator-groovy,sagarsane/incubator-groovy,tkruse/incubator-groovy,samanalysis/incubator-groovy,paulk-asert/groovy,genqiang/incubator-groovy,guangying945/incubator-groovy,aaronzirbes/incubator-groovy,jwagenleitner/groovy,sagarsane/groovy-core,sagarsane/groovy-core,paulk-asert/incubator-groovy,adjohnson916/incubator-groovy,ebourg/groovy-core,fpavageau/groovy,armsargis/groovy,jwagenleitner/incubator-groovy,rabbitcount/incubator-groovy,alien11689/incubator-groovy,shils/groovy,apache/groovy,mariogarcia/groovy-core,alien11689/groovy-core,kidaa/incubator-groovy,apache/incubator-groovy,shils/groovy,dpolivaev/groovy,paulk-asert/incubator-groovy,traneHead/groovy-core,graemerocher/incubator-groovy,i55ac/incubator-groovy,upadhyayap/incubator-groovy,taoguan/incubator-groovy,paplorinc/incubator-groovy,shils/incubator-groovy,ebourg/incubator-groovy,gillius/incubator-groovy,nkhuyu/incubator-groovy,sagarsane/incubator-groovy,guangying945/incubator-groovy,nobeans/incubator-groovy,jwagenleitner/groovy,christoph-frick/groovy-core,bsideup/groovy-core,dpolivaev/groovy,apache/incubator-groovy,apache/groovy,jwagenleitner/incubator-groovy,gillius/incubator-groovy,christoph-frick/groovy-core,bsideup/groovy-core,groovy/gro
ovy-core,shils/groovy,upadhyayap/incubator-groovy,ebourg/incubator-groovy,groovy/groovy-core,pickypg/incubator-groovy,rlovtangen/groovy-core,ebourg/incubator-groovy,nkhuyu/incubator-groovy,PascalSchumacher/incubator-groovy,eginez/incubator-groovy,eginez/incubator-groovy,kenzanmedia/incubator-groovy,nobeans/incubator-groovy,groovy/groovy-core,ChanJLee/incubator-groovy,alien11689/incubator-groovy,aim-for-better/incubator-groovy,rlovtangen/groovy-core,PascalSchumacher/incubator-groovy,PascalSchumacher/incubator-groovy,apache/groovy,ebourg/groovy-core,avafanasiev/groovy,pickypg/incubator-groovy,avafanasiev/groovy,eginez/incubator-groovy,christoph-frick/groovy-core,fpavageau/groovy,taoguan/incubator-groovy,jwagenleitner/groovy,russel/groovy,kidaa/incubator-groovy,eginez/incubator-groovy,paplorinc/incubator-groovy,genqiang/incubator-groovy,russel/incubator-groovy,kenzanmedia/incubator-groovy,i55ac/incubator-groovy,avafanasiev/groovy,yukangguo/incubator-groovy,christoph-frick/groovy-core,armsargis/groovy,graemerocher/incubator-groovy,bsideup/incubator-groovy,paulk-asert/groovy,antoaravinth/incubator-groovy,pledbrook/incubator-groovy,antoaravinth/incubator-groovy,EPadronU/incubator-groovy,shils/incubator-groovy,gillius/incubator-groovy,mariogarcia/groovy-core,yukangguo/incubator-groovy,ebourg/groovy-core,mariogarcia/groovy-core,groovy/groovy-core,groovy/groovy-core,jwagenleitner/groovy,shils/groovy,sagarsane/groovy-core,adjohnson916/groovy-core,jwagenleitner/incubator-groovy,nobeans/incubator-groovy,graemerocher/incubator-groovy,genqiang/incubator-groovy,dpolivaev/groovy,genqiang/incubator-groovy,taoguan/incubator-groovy,traneHead/groovy-core,pledbrook/incubator-groovy,kenzanmedia/incubator-groovy,nkhuyu/incubator-groovy,aaronzirbes/incubator-groovy,alien11689/groovy-core,armsargis/groovy,nkhuyu/incubator-groovy,rlovtangen/groovy-core,russel/incubator-groovy,yukangguo/incubator-groovy,bsideup/incubator-groovy,upadhyayap/incubator-groovy
/* * Copyright 2003-2013 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.codehaus.groovy.transform.trait; import groovy.transform.CompilationUnitAware; import org.codehaus.groovy.ast.ASTNode; import org.codehaus.groovy.ast.AnnotatedNode; import org.codehaus.groovy.ast.AnnotationNode; import org.codehaus.groovy.ast.ClassHelper; import org.codehaus.groovy.ast.ClassNode; import org.codehaus.groovy.ast.FieldNode; import org.codehaus.groovy.ast.GenericsType; import org.codehaus.groovy.ast.InnerClassNode; import org.codehaus.groovy.ast.MethodNode; import org.codehaus.groovy.ast.Parameter; import org.codehaus.groovy.ast.PropertyNode; import org.codehaus.groovy.ast.expr.ArgumentListExpression; import org.codehaus.groovy.ast.expr.BinaryExpression; import org.codehaus.groovy.ast.expr.CastExpression; import org.codehaus.groovy.ast.expr.ClassExpression; import org.codehaus.groovy.ast.expr.ConstantExpression; import org.codehaus.groovy.ast.expr.Expression; import org.codehaus.groovy.ast.expr.FieldExpression; import org.codehaus.groovy.ast.expr.MethodCallExpression; import org.codehaus.groovy.ast.expr.VariableExpression; import org.codehaus.groovy.ast.stmt.BlockStatement; import org.codehaus.groovy.ast.stmt.ExpressionStatement; import org.codehaus.groovy.ast.stmt.Statement; import org.codehaus.groovy.ast.tools.GeneralUtils; import org.codehaus.groovy.classgen.VariableScopeVisitor; import org.codehaus.groovy.classgen.Verifier; import org.codehaus.groovy.control.CompilationUnit; import org.codehaus.groovy.control.CompilePhase; import org.codehaus.groovy.control.SourceUnit; import org.codehaus.groovy.runtime.InvokerHelper; import org.codehaus.groovy.syntax.SyntaxException; import org.codehaus.groovy.syntax.Token; import org.codehaus.groovy.syntax.Types; import org.codehaus.groovy.transform.ASTTransformationCollectorCodeVisitor; import org.codehaus.groovy.transform.AbstractASTTransformation; import org.codehaus.groovy.transform.GroovyASTTransformation; import org.codehaus.groovy.transform.sc.StaticCompileTransformation; import java.lang.reflect.Modifier; import java.util.ArrayList; import java.util.Collection; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Set; /** * Handles generation of code for the @Trait annotation. 
A class annotated with @Trait will generate, instead: <ul> * <li>an <i>interface</i> with the same name</li> <li>an utility inner class that will be used by the compiler to * handle the trait</li> </ul> * * @author Cedric Champeau */ @GroovyASTTransformation(phase = CompilePhase.SEMANTIC_ANALYSIS) public class TraitASTTransformation extends AbstractASTTransformation implements CompilationUnitAware { private static final ClassNode INVOKERHELPER_CLASSNODE = ClassHelper.make(InvokerHelper.class); private static final ClassNode OVERRIDE_CLASSNODE = ClassHelper.make(Override.class); private SourceUnit unit; private CompilationUnit compilationUnit; public void visit(ASTNode[] nodes, SourceUnit source) { AnnotatedNode parent = (AnnotatedNode) nodes[1]; AnnotationNode anno = (AnnotationNode) nodes[0]; if (!Traits.TRAIT_CLASSNODE.equals(anno.getClassNode())) return; unit = source; init(nodes, source); if (parent instanceof ClassNode) { ClassNode cNode = (ClassNode) parent; if (!checkNotInterface(cNode, Traits.TRAIT_TYPE_NAME)) return; checkNoConstructor(cNode); checkExtendsClause(cNode); replaceExtendsByImplements(cNode); createHelperClass(cNode); } } private void checkExtendsClause(final ClassNode cNode) { ClassNode superClass = cNode.getSuperClass(); if (superClass.isInterface() && !Traits.isTrait(superClass)) { addError("Trait cannot extend an interface. Use 'implements' instead", cNode); } } private void replaceExtendsByImplements(final ClassNode cNode) { ClassNode superClass = cNode.getUnresolvedSuperClass(); if (Traits.isTrait(superClass)) { // move from super class to interface; cNode.setSuperClass(ClassHelper.OBJECT_TYPE); cNode.setUnresolvedSuperClass(ClassHelper.OBJECT_TYPE); cNode.addInterface(superClass); resolveScope(cNode); } } private void resolveScope(final ClassNode cNode) { // we need to resolve again! VariableScopeVisitor scopeVisitor = new VariableScopeVisitor(unit); scopeVisitor.visitClass(cNode); } private void checkNoConstructor(final ClassNode cNode) { if (!cNode.getDeclaredConstructors().isEmpty()) { addError("Error processing trait '" + cNode.getName() + "'. 
" + " Constructors are not allowed.", cNode); } } private static void fixGenerics(MethodNode mn, ClassNode cNode) { if (!cNode.isUsingGenerics()) return; mn.setGenericsTypes(cNode.getGenericsTypes()); } private void createHelperClass(final ClassNode cNode) { ClassNode helper = new InnerClassNode( cNode, Traits.helperClassName(cNode), ACC_PUBLIC | ACC_STATIC | ACC_ABSTRACT | ACC_SYNTHETIC, ClassHelper.OBJECT_TYPE, ClassNode.EMPTY_ARRAY, null ); cNode.setModifiers(ACC_PUBLIC | ACC_INTERFACE | ACC_ABSTRACT); checkInnerClasses(cNode); MethodNode initializer = createInitMethod(false, cNode, helper); MethodNode staticInitializer = createInitMethod(true, cNode, helper); // apply the verifier to have the property nodes generated generatePropertyMethods(cNode); // prepare fields List<FieldNode> fields = new ArrayList<FieldNode>(); Set<String> fieldNames = new HashSet<String>(); for (FieldNode field : cNode.getFields()) { if (!"metaClass".equals(field.getName()) && (!field.isSynthetic() || field.getName().indexOf('$') < 0)) { fields.add(field); fieldNames.add(field.getName()); } } ClassNode fieldHelper = null; if (!fields.isEmpty()) { fieldHelper = new InnerClassNode( cNode, Traits.fieldHelperClassName(cNode), ACC_STATIC | ACC_PUBLIC | ACC_INTERFACE | ACC_ABSTRACT, ClassHelper.OBJECT_TYPE ); } // add methods List<MethodNode> methods = new ArrayList<MethodNode>(cNode.getMethods()); List<MethodNode> nonPublicAPIMethods = new LinkedList<MethodNode>(); for (final MethodNode methodNode : methods) { boolean declared = methodNode.getDeclaringClass() == cNode; if (declared) { if (!methodNode.isSynthetic() && (methodNode.isProtected() || methodNode.getModifiers()==0)) { unit.addError(new SyntaxException("Cannot have protected/package private method in a trait (" + cNode.getName() + "#" + methodNode.getTypeDescriptor() + ")", methodNode.getLineNumber(), methodNode.getColumnNumber())); return; } helper.addMethod(processMethod(cNode, methodNode, fieldHelper, fieldNames)); if (methodNode.isPrivate() || methodNode.isStatic()) { nonPublicAPIMethods.add(methodNode); } } } // remove methods which should not appear in the trait interface for (MethodNode privateMethod : nonPublicAPIMethods) { cNode.removeMethod(privateMethod); } // add fields for (FieldNode field : fields) { processField(field, initializer, staticInitializer, fieldHelper, cNode, fieldNames); } // clear properties to avoid generation of methods cNode.getProperties().clear(); // copy annotations copyClassAnnotations(cNode, helper); fields = new ArrayList<FieldNode>(cNode.getFields()); // reuse the full list of fields for (FieldNode field : fields) { cNode.removeField(field.getName()); } // visit AST xforms registerASTTranformations(helper); unit.getAST().addClass(helper); if (fieldHelper != null) { unit.getAST().addClass(fieldHelper); } } private MethodNode createInitMethod(final boolean isStatic, final ClassNode cNode, final ClassNode helper) { MethodNode initializer = new MethodNode( isStatic?Traits.STATIC_INIT_METHOD:Traits.INIT_METHOD, ACC_STATIC | ACC_PUBLIC | ACC_SYNTHETIC, ClassHelper.VOID_TYPE, new Parameter[]{createSelfParameter(cNode, isStatic)}, ClassNode.EMPTY_ARRAY, new BlockStatement() ); fixGenerics(initializer, cNode); helper.addMethod(initializer); AnnotationNode an = new AnnotationNode(TraitComposer.COMPILESTATIC_CLASSNODE); initializer.addAnnotation(an); cNode.addTransform(StaticCompileTransformation.class, an); return initializer; } private void registerASTTranformations(final ClassNode helper) { ASTTransformationCollectorCodeVisitor 
collector = new ASTTransformationCollectorCodeVisitor( unit, compilationUnit.getTransformLoader() ); collector.visitClass(helper); } /** * Copies annotation from the trait to the helper, excluding the trait annotation itself * @param cNode the trait class node * @param helper the helper class node */ private void copyClassAnnotations(final ClassNode cNode, final ClassNode helper) { List<AnnotationNode> annotations = cNode.getAnnotations(); for (AnnotationNode annotation : annotations) { if (!annotation.getClassNode().equals(Traits.TRAIT_CLASSNODE)) { helper.addAnnotation(annotation); } } } private void checkInnerClasses(final ClassNode cNode) { Iterator<InnerClassNode> it = cNode.getInnerClasses(); while (it.hasNext()) { InnerClassNode origin = it.next(); if ((origin.getModifiers() & ACC_STATIC) == 0) { unit.addError(new SyntaxException("Cannot have non-static inner class inside a trait ("+origin.getName()+")", origin.getLineNumber(), origin.getColumnNumber())); } } } private void generatePropertyMethods(final ClassNode cNode) { for (PropertyNode node : cNode.getProperties()) { processProperty(cNode, node); } } /** * Mostly copied from the {@link Verifier} class but does *not* generate bytecode * * @param cNode * @param node */ private static void processProperty(final ClassNode cNode, PropertyNode node) { String name = node.getName(); FieldNode field = node.getField(); int propNodeModifiers = node.getModifiers(); String getterName = "get" + Verifier.capitalize(name); String setterName = "set" + Verifier.capitalize(name); // GROOVY-3726: clear volatile, transient modifiers so that they don't get applied to methods if ((propNodeModifiers & Modifier.VOLATILE) != 0) { propNodeModifiers = propNodeModifiers - Modifier.VOLATILE; } if ((propNodeModifiers & Modifier.TRANSIENT) != 0) { propNodeModifiers = propNodeModifiers - Modifier.TRANSIENT; } Statement getterBlock = node.getGetterBlock(); if (getterBlock == null) { MethodNode getter = cNode.getGetterMethod(getterName); if (getter == null && ClassHelper.boolean_TYPE == node.getType()) { String secondGetterName = "is" + Verifier.capitalize(name); getter = cNode.getGetterMethod(secondGetterName); } if (!node.isPrivate() && methodNeedsReplacement(cNode, getter)) { getterBlock = new ExpressionStatement(new FieldExpression(field)); } } Statement setterBlock = node.getSetterBlock(); if (setterBlock == null) { // 2nd arg false below: though not usual, allow setter with non-void return type MethodNode setter = cNode.getSetterMethod(setterName, false); if (!node.isPrivate() && (propNodeModifiers & ACC_FINAL) == 0 && methodNeedsReplacement(cNode, setter)) { setterBlock = new ExpressionStatement( new BinaryExpression( new FieldExpression(field), Token.newSymbol(Types.EQUAL, 0, 0), new VariableExpression("value") ) ); } } if (getterBlock != null) { MethodNode getter = new MethodNode(getterName, propNodeModifiers, node.getType(), Parameter.EMPTY_ARRAY, ClassNode.EMPTY_ARRAY, getterBlock); getter.setSynthetic(true); fixGenerics(getter, cNode); cNode.addMethod(getter); if (ClassHelper.boolean_TYPE == node.getType() || ClassHelper.Boolean_TYPE == node.getType()) { String secondGetterName = "is" + Verifier.capitalize(name); MethodNode secondGetter = new MethodNode(secondGetterName, propNodeModifiers, node.getType(), Parameter.EMPTY_ARRAY, ClassNode.EMPTY_ARRAY, getterBlock); secondGetter.setSynthetic(true); fixGenerics(secondGetter, cNode); cNode.addMethod(secondGetter); } } if (setterBlock != null) { Parameter[] setterParameterTypes = {new 
Parameter(node.getType(), "value")}; VariableExpression var = (VariableExpression) ((BinaryExpression) ((ExpressionStatement) setterBlock).getExpression()).getRightExpression(); var.setAccessedVariable(setterParameterTypes[0]); MethodNode setter = new MethodNode(setterName, propNodeModifiers, ClassHelper.VOID_TYPE, setterParameterTypes, ClassNode.EMPTY_ARRAY, setterBlock); setter.setSynthetic(true); fixGenerics(setter, cNode); cNode.addMethod(setter); } } private static boolean methodNeedsReplacement(ClassNode classNode, MethodNode m) { // no method found, we need to replace if (m == null) return true; // method is in current class, nothing to be done if (m.getDeclaringClass() == classNode) return false; // do not overwrite final if ((m.getModifiers() & ACC_FINAL) != 0) return false; return true; } private void processField(final FieldNode field, final MethodNode initializer, final MethodNode staticInitializer, final ClassNode fieldHelper, final ClassNode trait, final Set<String> knownFields) { Expression initialExpression = field.getInitialExpression(); MethodNode selectedMethod = field.isStatic()?staticInitializer:initializer; if (initialExpression != null) { VariableExpression thisObject = new VariableExpression(selectedMethod.getParameters()[0]); ExpressionStatement initCode = new ExpressionStatement(initialExpression); processBody(thisObject, selectedMethod, initCode, trait, fieldHelper, knownFields); BlockStatement code = (BlockStatement) selectedMethod.getCode(); MethodCallExpression mce; if (field.isStatic()) { mce = new MethodCallExpression( new ClassExpression(INVOKERHELPER_CLASSNODE), "invokeStaticMethod", new ArgumentListExpression( thisObject, new ConstantExpression(Traits.helperSetterName(field)), initCode.getExpression() ) ); } else { mce = new MethodCallExpression( new CastExpression(createReceiverType(field.isStatic(), fieldHelper), thisObject), Traits.helperSetterName(field), initCode.getExpression() ); } mce.setImplicitThis(false); mce.setSourcePosition(initialExpression); code.addStatement(new ExpressionStatement(mce)); } // define setter/getter helper methods fieldHelper.addMethod( Traits.helperSetterName(field), ACC_PUBLIC | ACC_ABSTRACT, field.getOriginType(), new Parameter[]{new Parameter(field.getOriginType(), "val")}, ClassNode.EMPTY_ARRAY, null ); fieldHelper.addMethod( Traits.helperGetterName(field), ACC_PUBLIC | ACC_ABSTRACT, field.getOriginType(), Parameter.EMPTY_ARRAY, ClassNode.EMPTY_ARRAY, null ); // dummy fields are only used to carry annotations if instance field // and to differentiate from static fields otherwise String dummyFieldName = (field.isStatic() ? Traits.STATIC_FIELD_PREFIX : Traits.FIELD_PREFIX) + (field.isPublic()? 
Traits.PUBLIC_FIELD_PREFIX : Traits.PRIVATE_FIELD_PREFIX)+ Traits.remappedFieldName(field.getOwner(), field.getName()); FieldNode dummyField = new FieldNode( dummyFieldName, ACC_STATIC | ACC_PUBLIC | ACC_FINAL | ACC_SYNTHETIC, field.getOriginType(), fieldHelper, null ); // copy annotations from field to dummy field List<AnnotationNode> copied = new LinkedList<AnnotationNode>(); List<AnnotationNode> notCopied = new LinkedList<AnnotationNode>(); GeneralUtils.copyAnnotatedNodeAnnotations(field, copied, notCopied); dummyField.addAnnotations(copied); fieldHelper.addField(dummyField); } private MethodNode processMethod(ClassNode traitClass, MethodNode methodNode, ClassNode fieldHelper, Collection<String> knownFields) { Parameter[] initialParams = methodNode.getParameters(); Parameter[] newParams = new Parameter[initialParams.length + 1]; newParams[0] = createSelfParameter(traitClass, methodNode.isStatic()); System.arraycopy(initialParams, 0, newParams, 1, initialParams.length); final int mod = methodNode.isPrivate()?ACC_PRIVATE:ACC_PUBLIC; MethodNode mNode = new MethodNode( methodNode.getName(), mod | ACC_STATIC, methodNode.getReturnType(), newParams, methodNode.getExceptions(), processBody(new VariableExpression(newParams[0]), methodNode, methodNode.getCode(), traitClass, fieldHelper, knownFields) ); mNode.setSourcePosition(methodNode); mNode.addAnnotations(filterAnnotations(methodNode.getAnnotations())); mNode.setGenericsTypes(methodNode.getGenericsTypes()); if (methodNode.isAbstract()) { mNode.setModifiers(ACC_PUBLIC | ACC_ABSTRACT); } else { methodNode.addAnnotation(new AnnotationNode(Traits.IMPLEMENTED_CLASSNODE)); } methodNode.setCode(null); if (!methodNode.isPrivate() && !methodNode.isStatic()) { methodNode.setModifiers(ACC_PUBLIC | ACC_ABSTRACT); } return mNode; } private static List<AnnotationNode> filterAnnotations(List<AnnotationNode> annotations) { List<AnnotationNode> result = new ArrayList<AnnotationNode>(annotations.size()); for (AnnotationNode annotation : annotations) { if (!OVERRIDE_CLASSNODE.equals(annotation.getClassNode())) { result.add(annotation); } } return result; } private Parameter createSelfParameter(final ClassNode traitClass, boolean isStatic) { final ClassNode rawType = traitClass.getPlainNodeReference(); ClassNode type = createReceiverType(isStatic, rawType); return new Parameter(type, isStatic?Traits.STATIC_THIS_OBJECT:Traits.THIS_OBJECT); } private ClassNode createReceiverType(final boolean isStatic, final ClassNode rawType) { ClassNode type; if (isStatic) { // Class<TraitClass> type = ClassHelper.CLASS_Type.getPlainNodeReference(); type.setGenericsTypes(new GenericsType[]{ new GenericsType(rawType) }); } else { // TraitClass type = rawType; } return type; } private Statement processBody(VariableExpression thisObject, MethodNode methodNode, Statement code, ClassNode trait, ClassNode fieldHelper, Collection<String> knownFields) { if (code == null) return null; NAryOperationRewriter operationRewriter = new NAryOperationRewriter(unit, knownFields); code.visit(operationRewriter); SuperCallTraitTransformer superTrn = new SuperCallTraitTransformer(unit); code.visit(superTrn); TraitReceiverTransformer trn = new TraitReceiverTransformer(thisObject, unit, trait, fieldHelper, knownFields); code.visit(trn); return code; } public void setCompilationUnit(final CompilationUnit unit) { this.compilationUnit = unit; } }
src/main/org/codehaus/groovy/transform/trait/TraitASTTransformation.java
/* * Copyright 2003-2013 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.codehaus.groovy.transform.trait; import groovy.transform.CompilationUnitAware; import org.codehaus.groovy.ast.ASTNode; import org.codehaus.groovy.ast.AnnotatedNode; import org.codehaus.groovy.ast.AnnotationNode; import org.codehaus.groovy.ast.ClassHelper; import org.codehaus.groovy.ast.ClassNode; import org.codehaus.groovy.ast.FieldNode; import org.codehaus.groovy.ast.GenericsType; import org.codehaus.groovy.ast.InnerClassNode; import org.codehaus.groovy.ast.MethodNode; import org.codehaus.groovy.ast.Parameter; import org.codehaus.groovy.ast.PropertyNode; import org.codehaus.groovy.ast.expr.ArgumentListExpression; import org.codehaus.groovy.ast.expr.BinaryExpression; import org.codehaus.groovy.ast.expr.CastExpression; import org.codehaus.groovy.ast.expr.ClassExpression; import org.codehaus.groovy.ast.expr.ConstantExpression; import org.codehaus.groovy.ast.expr.Expression; import org.codehaus.groovy.ast.expr.FieldExpression; import org.codehaus.groovy.ast.expr.MethodCallExpression; import org.codehaus.groovy.ast.expr.VariableExpression; import org.codehaus.groovy.ast.stmt.BlockStatement; import org.codehaus.groovy.ast.stmt.ExpressionStatement; import org.codehaus.groovy.ast.stmt.Statement; import org.codehaus.groovy.ast.tools.GeneralUtils; import org.codehaus.groovy.classgen.VariableScopeVisitor; import org.codehaus.groovy.classgen.Verifier; import org.codehaus.groovy.control.CompilationUnit; import org.codehaus.groovy.control.CompilePhase; import org.codehaus.groovy.control.SourceUnit; import org.codehaus.groovy.runtime.InvokerHelper; import org.codehaus.groovy.syntax.SyntaxException; import org.codehaus.groovy.syntax.Token; import org.codehaus.groovy.syntax.Types; import org.codehaus.groovy.transform.ASTTransformationCollectorCodeVisitor; import org.codehaus.groovy.transform.AbstractASTTransformation; import org.codehaus.groovy.transform.GroovyASTTransformation; import org.codehaus.groovy.transform.sc.StaticCompileTransformation; import java.lang.reflect.Modifier; import java.util.ArrayList; import java.util.Collection; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Set; /** * Handles generation of code for the @Trait annotation. 
A class annotated with @Trait will generate, instead: <ul> * <li>an <i>interface</i> with the same name</li> <li>an utility inner class that will be used by the compiler to * handle the trait</li> </ul> * * @author Cedric Champeau */ @GroovyASTTransformation(phase = CompilePhase.SEMANTIC_ANALYSIS) public class TraitASTTransformation extends AbstractASTTransformation implements CompilationUnitAware { private static final ClassNode INVOKERHELPER_CLASSNODE = ClassHelper.make(InvokerHelper.class); private SourceUnit unit; private CompilationUnit compilationUnit; public void visit(ASTNode[] nodes, SourceUnit source) { AnnotatedNode parent = (AnnotatedNode) nodes[1]; AnnotationNode anno = (AnnotationNode) nodes[0]; if (!Traits.TRAIT_CLASSNODE.equals(anno.getClassNode())) return; unit = source; init(nodes, source); if (parent instanceof ClassNode) { ClassNode cNode = (ClassNode) parent; if (!checkNotInterface(cNode, Traits.TRAIT_TYPE_NAME)) return; checkNoConstructor(cNode); checkExtendsClause(cNode); replaceExtendsByImplements(cNode); createHelperClass(cNode); } } private void checkExtendsClause(final ClassNode cNode) { ClassNode superClass = cNode.getSuperClass(); if (superClass.isInterface() && !Traits.isTrait(superClass)) { addError("Trait cannot extend an interface. Use 'implements' instead", cNode); } } private void replaceExtendsByImplements(final ClassNode cNode) { ClassNode superClass = cNode.getUnresolvedSuperClass(); if (Traits.isTrait(superClass)) { // move from super class to interface; cNode.setSuperClass(ClassHelper.OBJECT_TYPE); cNode.setUnresolvedSuperClass(ClassHelper.OBJECT_TYPE); cNode.addInterface(superClass); resolveScope(cNode); } } private void resolveScope(final ClassNode cNode) { // we need to resolve again! VariableScopeVisitor scopeVisitor = new VariableScopeVisitor(unit); scopeVisitor.visitClass(cNode); } private void checkNoConstructor(final ClassNode cNode) { if (!cNode.getDeclaredConstructors().isEmpty()) { addError("Error processing trait '" + cNode.getName() + "'. 
" + " Constructors are not allowed.", cNode); } } private static void fixGenerics(MethodNode mn, ClassNode cNode) { if (!cNode.isUsingGenerics()) return; mn.setGenericsTypes(cNode.getGenericsTypes()); } private void createHelperClass(final ClassNode cNode) { ClassNode helper = new InnerClassNode( cNode, Traits.helperClassName(cNode), ACC_PUBLIC | ACC_STATIC | ACC_ABSTRACT | ACC_SYNTHETIC, ClassHelper.OBJECT_TYPE, ClassNode.EMPTY_ARRAY, null ); cNode.setModifiers(ACC_PUBLIC | ACC_INTERFACE | ACC_ABSTRACT); checkInnerClasses(cNode); MethodNode initializer = createInitMethod(false, cNode, helper); MethodNode staticInitializer = createInitMethod(true, cNode, helper); // apply the verifier to have the property nodes generated generatePropertyMethods(cNode); // prepare fields List<FieldNode> fields = new ArrayList<FieldNode>(); Set<String> fieldNames = new HashSet<String>(); for (FieldNode field : cNode.getFields()) { if (!"metaClass".equals(field.getName()) && (!field.isSynthetic() || field.getName().indexOf('$') < 0)) { fields.add(field); fieldNames.add(field.getName()); } } ClassNode fieldHelper = null; if (!fields.isEmpty()) { fieldHelper = new InnerClassNode( cNode, Traits.fieldHelperClassName(cNode), ACC_STATIC | ACC_PUBLIC | ACC_INTERFACE | ACC_ABSTRACT, ClassHelper.OBJECT_TYPE ); } // add methods List<MethodNode> methods = new ArrayList<MethodNode>(cNode.getMethods()); List<MethodNode> nonPublicAPIMethods = new LinkedList<MethodNode>(); for (final MethodNode methodNode : methods) { boolean declared = methodNode.getDeclaringClass() == cNode; if (declared) { if (!methodNode.isSynthetic() && (methodNode.isProtected() || methodNode.getModifiers()==0)) { unit.addError(new SyntaxException("Cannot have protected/package private method in a trait (" + cNode.getName() + "#" + methodNode.getTypeDescriptor() + ")", methodNode.getLineNumber(), methodNode.getColumnNumber())); return; } helper.addMethod(processMethod(cNode, methodNode, fieldHelper, fieldNames)); if (methodNode.isPrivate() || methodNode.isStatic()) { nonPublicAPIMethods.add(methodNode); } } } // remove methods which should not appear in the trait interface for (MethodNode privateMethod : nonPublicAPIMethods) { cNode.removeMethod(privateMethod); } // add fields for (FieldNode field : fields) { processField(field, initializer, staticInitializer, fieldHelper, cNode, fieldNames); } // clear properties to avoid generation of methods cNode.getProperties().clear(); // copy annotations copyClassAnnotations(cNode, helper); fields = new ArrayList<FieldNode>(cNode.getFields()); // reuse the full list of fields for (FieldNode field : fields) { cNode.removeField(field.getName()); } // visit AST xforms registerASTTranformations(helper); unit.getAST().addClass(helper); if (fieldHelper != null) { unit.getAST().addClass(fieldHelper); } } private MethodNode createInitMethod(final boolean isStatic, final ClassNode cNode, final ClassNode helper) { MethodNode initializer = new MethodNode( isStatic?Traits.STATIC_INIT_METHOD:Traits.INIT_METHOD, ACC_STATIC | ACC_PUBLIC | ACC_SYNTHETIC, ClassHelper.VOID_TYPE, new Parameter[]{createSelfParameter(cNode, isStatic)}, ClassNode.EMPTY_ARRAY, new BlockStatement() ); fixGenerics(initializer, cNode); helper.addMethod(initializer); AnnotationNode an = new AnnotationNode(TraitComposer.COMPILESTATIC_CLASSNODE); initializer.addAnnotation(an); cNode.addTransform(StaticCompileTransformation.class, an); return initializer; } private void registerASTTranformations(final ClassNode helper) { ASTTransformationCollectorCodeVisitor 
collector = new ASTTransformationCollectorCodeVisitor( unit, compilationUnit.getTransformLoader() ); collector.visitClass(helper); } /** * Copies annotation from the trait to the helper, excluding the trait annotation itself * @param cNode the trait class node * @param helper the helper class node */ private void copyClassAnnotations(final ClassNode cNode, final ClassNode helper) { List<AnnotationNode> annotations = cNode.getAnnotations(); for (AnnotationNode annotation : annotations) { if (!annotation.getClassNode().equals(Traits.TRAIT_CLASSNODE)) { helper.addAnnotation(annotation); } } } private void checkInnerClasses(final ClassNode cNode) { Iterator<InnerClassNode> it = cNode.getInnerClasses(); while (it.hasNext()) { InnerClassNode origin = it.next(); if ((origin.getModifiers() & ACC_STATIC) == 0) { unit.addError(new SyntaxException("Cannot have non-static inner class inside a trait ("+origin.getName()+")", origin.getLineNumber(), origin.getColumnNumber())); } } } private void generatePropertyMethods(final ClassNode cNode) { for (PropertyNode node : cNode.getProperties()) { processProperty(cNode, node); } } /** * Mostly copied from the {@link Verifier} class but does *not* generate bytecode * * @param cNode * @param node */ private static void processProperty(final ClassNode cNode, PropertyNode node) { String name = node.getName(); FieldNode field = node.getField(); int propNodeModifiers = node.getModifiers(); String getterName = "get" + Verifier.capitalize(name); String setterName = "set" + Verifier.capitalize(name); // GROOVY-3726: clear volatile, transient modifiers so that they don't get applied to methods if ((propNodeModifiers & Modifier.VOLATILE) != 0) { propNodeModifiers = propNodeModifiers - Modifier.VOLATILE; } if ((propNodeModifiers & Modifier.TRANSIENT) != 0) { propNodeModifiers = propNodeModifiers - Modifier.TRANSIENT; } Statement getterBlock = node.getGetterBlock(); if (getterBlock == null) { MethodNode getter = cNode.getGetterMethod(getterName); if (getter == null && ClassHelper.boolean_TYPE == node.getType()) { String secondGetterName = "is" + Verifier.capitalize(name); getter = cNode.getGetterMethod(secondGetterName); } if (!node.isPrivate() && methodNeedsReplacement(cNode, getter)) { getterBlock = new ExpressionStatement(new FieldExpression(field)); } } Statement setterBlock = node.getSetterBlock(); if (setterBlock == null) { // 2nd arg false below: though not usual, allow setter with non-void return type MethodNode setter = cNode.getSetterMethod(setterName, false); if (!node.isPrivate() && (propNodeModifiers & ACC_FINAL) == 0 && methodNeedsReplacement(cNode, setter)) { setterBlock = new ExpressionStatement( new BinaryExpression( new FieldExpression(field), Token.newSymbol(Types.EQUAL, 0, 0), new VariableExpression("value") ) ); } } if (getterBlock != null) { MethodNode getter = new MethodNode(getterName, propNodeModifiers, node.getType(), Parameter.EMPTY_ARRAY, ClassNode.EMPTY_ARRAY, getterBlock); getter.setSynthetic(true); fixGenerics(getter, cNode); cNode.addMethod(getter); if (ClassHelper.boolean_TYPE == node.getType() || ClassHelper.Boolean_TYPE == node.getType()) { String secondGetterName = "is" + Verifier.capitalize(name); MethodNode secondGetter = new MethodNode(secondGetterName, propNodeModifiers, node.getType(), Parameter.EMPTY_ARRAY, ClassNode.EMPTY_ARRAY, getterBlock); secondGetter.setSynthetic(true); fixGenerics(secondGetter, cNode); cNode.addMethod(secondGetter); } } if (setterBlock != null) { Parameter[] setterParameterTypes = {new 
Parameter(node.getType(), "value")}; VariableExpression var = (VariableExpression) ((BinaryExpression) ((ExpressionStatement) setterBlock).getExpression()).getRightExpression(); var.setAccessedVariable(setterParameterTypes[0]); MethodNode setter = new MethodNode(setterName, propNodeModifiers, ClassHelper.VOID_TYPE, setterParameterTypes, ClassNode.EMPTY_ARRAY, setterBlock); setter.setSynthetic(true); fixGenerics(setter, cNode); cNode.addMethod(setter); } } private static boolean methodNeedsReplacement(ClassNode classNode, MethodNode m) { // no method found, we need to replace if (m == null) return true; // method is in current class, nothing to be done if (m.getDeclaringClass() == classNode) return false; // do not overwrite final if ((m.getModifiers() & ACC_FINAL) != 0) return false; return true; } private void processField(final FieldNode field, final MethodNode initializer, final MethodNode staticInitializer, final ClassNode fieldHelper, final ClassNode trait, final Set<String> knownFields) { Expression initialExpression = field.getInitialExpression(); MethodNode selectedMethod = field.isStatic()?staticInitializer:initializer; if (initialExpression != null) { VariableExpression thisObject = new VariableExpression(selectedMethod.getParameters()[0]); ExpressionStatement initCode = new ExpressionStatement(initialExpression); processBody(thisObject, selectedMethod, initCode, trait, fieldHelper, knownFields); BlockStatement code = (BlockStatement) selectedMethod.getCode(); MethodCallExpression mce; if (field.isStatic()) { mce = new MethodCallExpression( new ClassExpression(INVOKERHELPER_CLASSNODE), "invokeStaticMethod", new ArgumentListExpression( thisObject, new ConstantExpression(Traits.helperSetterName(field)), initCode.getExpression() ) ); } else { mce = new MethodCallExpression( new CastExpression(createReceiverType(field.isStatic(), fieldHelper), thisObject), Traits.helperSetterName(field), initCode.getExpression() ); } mce.setImplicitThis(false); mce.setSourcePosition(initialExpression); code.addStatement(new ExpressionStatement(mce)); } // define setter/getter helper methods fieldHelper.addMethod( Traits.helperSetterName(field), ACC_PUBLIC | ACC_ABSTRACT, field.getOriginType(), new Parameter[]{new Parameter(field.getOriginType(), "val")}, ClassNode.EMPTY_ARRAY, null ); fieldHelper.addMethod( Traits.helperGetterName(field), ACC_PUBLIC | ACC_ABSTRACT, field.getOriginType(), Parameter.EMPTY_ARRAY, ClassNode.EMPTY_ARRAY, null ); // dummy fields are only used to carry annotations if instance field // and to differentiate from static fields otherwise String dummyFieldName = (field.isStatic() ? Traits.STATIC_FIELD_PREFIX : Traits.FIELD_PREFIX) + (field.isPublic()? 
Traits.PUBLIC_FIELD_PREFIX : Traits.PRIVATE_FIELD_PREFIX)+ Traits.remappedFieldName(field.getOwner(), field.getName()); FieldNode dummyField = new FieldNode( dummyFieldName, ACC_STATIC | ACC_PUBLIC | ACC_FINAL | ACC_SYNTHETIC, field.getOriginType(), fieldHelper, null ); // copy annotations from field to dummy field List<AnnotationNode> copied = new LinkedList<AnnotationNode>(); List<AnnotationNode> notCopied = new LinkedList<AnnotationNode>(); GeneralUtils.copyAnnotatedNodeAnnotations(field, copied, notCopied); dummyField.addAnnotations(copied); fieldHelper.addField(dummyField); } private MethodNode processMethod(ClassNode traitClass, MethodNode methodNode, ClassNode fieldHelper, Collection<String> knownFields) { Parameter[] initialParams = methodNode.getParameters(); Parameter[] newParams = new Parameter[initialParams.length + 1]; newParams[0] = createSelfParameter(traitClass, methodNode.isStatic()); System.arraycopy(initialParams, 0, newParams, 1, initialParams.length); final int mod = methodNode.isPrivate()?ACC_PRIVATE:ACC_PUBLIC; MethodNode mNode = new MethodNode( methodNode.getName(), mod | ACC_STATIC, methodNode.getReturnType(), newParams, methodNode.getExceptions(), processBody(new VariableExpression(newParams[0]), methodNode, methodNode.getCode(), traitClass, fieldHelper, knownFields) ); mNode.setSourcePosition(methodNode); mNode.addAnnotations(filterAnnotations(methodNode.getAnnotations())); mNode.setGenericsTypes(methodNode.getGenericsTypes()); if (methodNode.isAbstract()) { mNode.setModifiers(ACC_PUBLIC | ACC_ABSTRACT); } else { methodNode.addAnnotation(new AnnotationNode(Traits.IMPLEMENTED_CLASSNODE)); } methodNode.setCode(null); if (!methodNode.isPrivate() && !methodNode.isStatic()) { methodNode.setModifiers(ACC_PUBLIC | ACC_ABSTRACT); } return mNode; } private static List<AnnotationNode> filterAnnotations(List<AnnotationNode> annotations) { List<AnnotationNode> result = new ArrayList<AnnotationNode>(annotations.size()); for (AnnotationNode annotation : annotations) { if (annotation.getClassNode().getTypeClass() != Override.class) { result.add(annotation); } } return result; } private Parameter createSelfParameter(final ClassNode traitClass, boolean isStatic) { final ClassNode rawType = traitClass.getPlainNodeReference(); ClassNode type = createReceiverType(isStatic, rawType); return new Parameter(type, isStatic?Traits.STATIC_THIS_OBJECT:Traits.THIS_OBJECT); } private ClassNode createReceiverType(final boolean isStatic, final ClassNode rawType) { ClassNode type; if (isStatic) { // Class<TraitClass> type = ClassHelper.CLASS_Type.getPlainNodeReference(); type.setGenericsTypes(new GenericsType[]{ new GenericsType(rawType) }); } else { // TraitClass type = rawType; } return type; } private Statement processBody(VariableExpression thisObject, MethodNode methodNode, Statement code, ClassNode trait, ClassNode fieldHelper, Collection<String> knownFields) { if (code == null) return null; NAryOperationRewriter operationRewriter = new NAryOperationRewriter(unit, knownFields); code.visit(operationRewriter); SuperCallTraitTransformer superTrn = new SuperCallTraitTransformer(unit); code.visit(superTrn); TraitReceiverTransformer trn = new TraitReceiverTransformer(thisObject, unit, trait, fieldHelper, knownFields); code.visit(trn); return code; } public void setCompilationUnit(final CompilationUnit unit) { this.compilationUnit = unit; } }
Create 'Override' annotation ClassNode and use it for filtering Helper method annotations
src/main/org/codehaus/groovy/transform/trait/TraitASTTransformation.java
Create 'Override' annotation ClassNode and use it for filtering Helper method annotations
<ide><path>rc/main/org/codehaus/groovy/transform/trait/TraitASTTransformation.java <ide> <ide> private static final ClassNode INVOKERHELPER_CLASSNODE = ClassHelper.make(InvokerHelper.class); <ide> <add> private static final ClassNode OVERRIDE_CLASSNODE = ClassHelper.make(Override.class); <add> <ide> private SourceUnit unit; <ide> private CompilationUnit compilationUnit; <ide> <ide> private static List<AnnotationNode> filterAnnotations(List<AnnotationNode> annotations) { <ide> List<AnnotationNode> result = new ArrayList<AnnotationNode>(annotations.size()); <ide> for (AnnotationNode annotation : annotations) { <del> if (annotation.getClassNode().getTypeClass() != Override.class) { <add> if (!OVERRIDE_CLASSNODE.equals(annotation.getClassNode())) { <ide> result.add(annotation); <ide> } <ide> }
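The diff above replaces a loaded-class comparison (annotation.getClassNode().getTypeClass() != Override.class) with a comparison against a pre-built ClassNode. A minimal, self-contained sketch of that pattern, assuming the standard org.codehaus.groovy.ast API; the class and method names below are illustrative and are not part of the commit:

import java.util.ArrayList;
import java.util.List;

import org.codehaus.groovy.ast.AnnotationNode;
import org.codehaus.groovy.ast.ClassHelper;
import org.codehaus.groovy.ast.ClassNode;

class AnnotationFilterSketch {

    // Built once; ClassNode equality is name-based, so the check does not
    // depend on a resolved Class instance the way getTypeClass() does.
    private static final ClassNode OVERRIDE_CLASSNODE = ClassHelper.make(Override.class);

    // Returns a copy of the list with any @Override annotation nodes removed,
    // mirroring the filterAnnotations(...) change shown in the diff above.
    static List<AnnotationNode> withoutOverride(List<AnnotationNode> annotations) {
        List<AnnotationNode> result = new ArrayList<AnnotationNode>(annotations.size());
        for (AnnotationNode annotation : annotations) {
            if (!OVERRIDE_CLASSNODE.equals(annotation.getClassNode())) {
                result.add(annotation);
            }
        }
        return result;
    }
}

Comparing ClassNodes keeps the filter entirely at the AST level, which is presumably why the commit also hoists the node into a static constant rather than rebuilding it on every call.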
JavaScript
mit
eb7a0f9e1399cfa1b0a9091f1ca8e2b8f563cae8
0
goliatone/jii
(function(namespace, exportName){ exportName = exportName || 'Model'; namespace = namespace || this; //Shim for Object.create var _createObject = Object.create || function(o) { var Func; Func = function() {}; Func.prototype = o; return new Func(); }; var _isFunc = function(obj){ return (typeof obj === 'function'); }; var _isArray = function(value) { return Object.prototype.toString.call(value) === '[object Array]'; }; var _result = function(obj, property){ if (obj == null) return null; var value = obj[property]; return _isFunc(value) ? value.call(obj) : value; }; var _capitalize =function(str){ return str.charAt(0).toUpperCase() + str.slice(1); }; var Model = Class( exportName/*,EventDispatcher*/).extend({ records:{}, crecords:{}, attributes:[], configure: function(config){ this.attributes = config.attributes; // this.unbind(); }, clonesArray:function(array){ var value; var i = 0, l = array.length; var result = []; for(; i < l; i++){ value = array[i]; result.push(value.clone()); } return result; }, guid:function(){ return 'xxxxxxxx-xxxx-xxxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, function(c) { var r = Math.random()*16|0, v = c == 'x' ? r : (r&0x3|0x8); return v.toString(16); }).toUpperCase(); }, isGuid:function(guid){ guid = guid.replace(/[A-Z\d]/g,function(){return 0;}); return (guid === '00000000-0000-0000-0000-000000000000'); }, findByPk:function(id){ var record = this.records[id]; if(! record && this.isGuid(id)) return this.findByGuid(id); if(!record) return false; return record.clone(); }, findByGuid:function(guid){ var record = this.records[guid]; if(! record) return false; return record.clone(); }, exists:function(id){ return this.findByPk(id) !== false; }, refresh:function(values, options){ options = options || {}; if(options.clear){ this.records = {}; this.crecords = {}; } var records = this.fromJSON(values); if(! isArray(records)){ records = [records]; } var record; var i = 0, l = records.length; for(; i < l; i++){ record = records[i]; record.id || (record.id = record.guid); this.records[record.id] = record; this.crecords[record.guid] = record; } this.publish('refresh', this.clonesArray(records)); return this; }, select:function(filter){ var result = (function(){ var r = this.records; var results = []; for( var id in r){ record = r[id]; if(filter(record)) results.push(record); } return records; }).call(this); return this.clonesArray(result); }, findByAttribute:function(attr, value){ var r = this.records; var record, id, rvalue; for( id in r){ record = r[id]; rvalue = _result(record,attr); if( rvalue === value) return record.clone(); } return null; }, findAllByAttribute:function(name, value){ return this.select(function(item){ var rvalue = _result(item, attr); return ( rvalue === value); }); }, //////////// each:function(callback){ var r = this.records; var results = []; var key, value; for(key in r){ value = r[key]; results.push(callback(value.clone())); } return results; }, all:function(){ return this.clonesArray(this.recordsValues()); }, first:function(){ var record = this.recordsValues()[0]; //void 0, evaluates to undefined; return record ? record.clone() : 0; }, last:function(){ var values = this.recordsValues(); var record = values[values.length - 1]; return record ? 
record.clone() : void 0; }, count:function(){ return this.recordsValues().length; }, deleteAll:function(){ var r = this.records; var key, value; var result = []; for( key in r){ value = r[key]; result.push(delete this.records[key]); } return result; }, destroyAll:function(){ var r = this.records; var key, value; var result = []; for( key in r){ value = r[key]; result.push(this.records[key].destroy()); } return result; }, //////////// update:function(id, attrs, options){ var record = this.find(id); if(record) record.updateAttributes(atts, options); return record; }, create:function(attrs, options){ var record = new this(atts); return record.save(options); }, destroy:function(id, options){ var record = this.find(id); if(record) record.destroy(options); return record; }, change:function(callbackOrParams){ if(_isFunc(callbackOrParams)){ return this.bind('change', callbackOrParams); } else { return this.publish('change', callbackOrParams); } }, fetch:function(callbackOrParams){ if(_isFunc(callbackOrParams)){ return this.bind('fetch', callbackOrParams); } else { return this.publish('fetch', callbackOrParams); } }, toJSON:function(){ return this.recordsValues(); }, fromJSON:function(objects){ if(!objects) return; if(typeof objects === 'string'){ objects = JSON.parse(objects); } if(_isArray(objects)){ var result = []; var i = 0, l = objects.length; var value; for(; i < l; i++){ value = objects[i]; result.push(new this(value)); } return result; } else { return new this(objects); } }, fromForm:function(selector){ var model = new this(); return model.fromForm(selector); }, recordsValues:function(){ var key, value; var result = []; var r = this.records; for(key in r){ value = r[key]; result.push(value); } return result; } }).include({ init:function(attrs){ this.modelName = _capitalize(this.__name__); if(attrs) this.load(attrs); this.guid = this.constructor.guid(); }, /** * It will load all values provied in attr object * into the record. * */ load:function(attr){ var key, value, prop; for (key in attr){ value = attr[key]; _isFunc(this[key]) ? this[key](value) : this[key] = value; } return this; }, reload:function(){ if(this.isNewRecord()) return this; //TODO: load clean.attributes instead. var original = this.constructor.find(this.id); this.load(original.attributes()); //If we return this, wouldn't it be the same? return original; }, save:function(options){ options = options || {}; //Validate unless told not to. if(options.validate !== false){ if(this.isInvalid()) this.publish('error',options); } this.publish('beforeSave'); var action = this.isNewRecord() ? 
'create' : 'update'; var record = this[action](options); this.publish('save', options); return record; }, create:function(options){ options = options || {}; this.publish('beforeCreate',options); if(!this.id) this.id = this.guid; var record = this.duplicate(false); //TODO: this.collection.add(this.id) this.constructor.records[this.id] = record; this.constructor.crecords[this.guid] = record; var clone = record.clone(); clone.publish('create', options); clone.publish('change:create', options); return clone; }, update:function(options){ options = options || {}; this.publish('beforeUpdate',options); //TODO: this.collection.get(this.id); var record = this.constructor.records[this.id]; record.load(this.attributes()); var clone = record.clone(); this.publish('update', options); this.publish('change:update',options); return clone; }, destroy:function(options){ options = options || {}; this.publish('beforeDestroy', options); //TODO: Move into ModelCollection. delete this.constructor.records[this.id]; delete this.constructor.crecords[this.guid]; this.destroyed = true; this.publish('destroy', options); this.publish('change::destroy', options); // this.unbind(); return this; }, duplicate:function(asNewRecord){ var result = new this.constructor(this.attributes()); if(asNewRecord === false) result.guid = this.guid; else delete result.id; return result; }, clone:function(){ return _createObject(this); }, attributes:function(){ var key; var attrs = this.constructor.attributes; var i = 0, l = attrs.length, result = {}; for(; i < l; i++ ){ key = attrs[i]; if( key in this) result[key] = _result(this, key); } if(this.id) result.id = this.id; return result; }, //TODO: Check that name is in accepted attrs. updateAttribute:function(name, value, options){ var old = this[name]; this[name] = value; //TODO: update:name to follow conventions. this.publish('update'+_capitalize(name),{old:old, value:value},options); return this.save(options); }, updateAttributes:function(values, options){ //TODO: Should we only do this if we have subscribers? //if(this.willPublish('updateAttributes')) var old = this.attributes(); this.load(values); //TODO: update:all?attributes this.publish('updateAttributes',{old:old, values:values},options); return this.save(options); }, isNewRecord:function(){ return ! this.isRecord(); }, isEqual:function(record){ if(!record) return false; if(record.constructor !== this.constructor) return false; if(record.guid !== this.guid) return false; if(record.id !== this.id) return false; return true; }, isValid:function(){ return this.validate(); }, isInvalid:function(){ return ! this.validate(); }, isRecord:function(){ //TODO: this.collection.has(this.id); return this.id && this.id in this.constructor.records; }, toString:function(){ return '['+this.__name__+' => '+" ]"; //return "<" + this.constructor.className + " (" + (JSON.stringify(this)) + ")>"; }, toJSON:function(){ return this.attributes(); }, fromJSON:function(records){ return this.load(records); } }); Model.prototype.validate = function(){ console.log('Implement validate'); }; Model.prototype.metadata = function(meta){ }; Model.prototype.errors = function(){ }; /** * * */ Model.prototype.fromForm = function(selector, keyModifier){ var inputs = $(selector).serializeArray(); var i = 0, l = inputs.length; var name; keyModifier = keyModifier || new RegExp("(^"+this.modelName+"\[)(\w+)(\]$)"); for(; i < l; i++){ key = inputs[i]; name = key.name.replace(keyModifier, "$2"); result[key.name] = key.value; } }; namespace[exportName] = Model; }).call(this);
src/model.js
(function(namespace, exportName){ exportName = exportName || 'Model'; namespace = namespace || this; //Shim for Object.create var _createObject = Object.create || function(o) { var Func; Func = function() {}; Func.prototype = o; return new Func(); }; var _isFunc = function(obj){ return (typeof obj === 'function'); }; var _isArray = function(value) { return Object.prototype.toString.call(value) === '[object Array]'; }; var _result = function(obj, property){ if (obj == null) return null; var value = obj[property]; return _isFunc(value) ? value.call(obj) : value; }; var _capitalize =function(str){ return str.charAt(0).toUpperCase() + str.slice(1); }; var Model = Class( exportName/*,EventDispatcher*/).extend({ records:{}, crecords:{}, attributes:[], configure: function(config){ this.attributes = config.attributes; // this.unbind(); }, clonesArray:function(array){ var value; var i = 0, l = array.length; var result = []; for(; i < l; i++){ value = array[i]; result.push(value.clone()); } return result; }, guid:function(){ return 'xxxxxxxx-xxxx-xxxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, function(c) { var r = Math.random()*16|0, v = c == 'x' ? r : (r&0x3|0x8); return v.toString(16); }).toUpperCase(); }, isGuid:function(guid){ guid = guid.replace(/[A-Z\d]/g,function(){return 0;}); return (guid === '00000000-0000-0000-0000-000000000000'); }, findByPk:function(id){ var record = this.records[id]; if(! record && this.isGuid(id)) return this.findByGuid(id); if(!record) return false; return record.clone(); }, findByGuid:function(guid){ var record = this.records[guid]; if(! record) return false; return record.clone(); }, exists:function(id){ return this.findByPk(id) !== false; }, refresh:function(values, options){ options = options || {}; if(options.clear){ this.records = {}; this.crecords = {}; } var records = this.fromJSON(values); if(! isArray(records)){ records = [records]; } var record; var i = 0, l = records.length; for(; i < l; i++){ record = records[i]; record.id || (record.id = record.guid); this.records[record.id] = record; this.crecords[record.guid] = record; } this.publish('refresh', this.clonesArray(records)); return this; }, select:function(filter){ var result = (function(){ var r = this.records; var results = []; for( var id in r){ record = r[id]; if(filter(record)) results.push(record); } return records; }).call(this); return this.clonesArray(result); }, findByAttribute:function(attr, value){ var r = this.records; var record, id, rvalue; for( id in r){ record = r[id]; rvalue = _result(record,attr); if( rvalue === value) return record.clone(); } return null; }, findAllByAttribute:function(name, value){ return this.select(function(item){ var rvalue = _result(item, attr); return ( rvalue === value); }); }, //////////// each:function(callback){ var r = this.records; var results = []; var key, value; for(key in r){ value = r[key]; results.push(callback(value.clone())); } return results; }, all:function(){ return this.clonesArray(this.recordsValues()); }, first:function(){ var record = this.recordsValues()[0]; //void 0, evaluates to undefined; return record ? record.clone() : 0; }, last:function(){ var values = this.recordsValues(); var record = values[values.length - 1]; return record ? 
record.clone() : void 0; }, count:function(){ return this.recordsValues().length; }, deleteAll:function(){ var r = this.records; var key, value; var result = []; for( key in r){ value = r[key]; result.push(delete this.records[key]); } return result; }, destroyAll:function(){ var r = this.records; var key, value; var result = []; for( key in r){ value = r[key]; result.push(this.records[key].destroy()); } return result; }, //////////// update:function(id, attrs, options){ var record = this.find(id); if(record) record.updateAttributes(atts, options); return record; }, create:function(attrs, options){ var record = new this(atts); return record.save(options); }, destroy:function(id, options){ var record = this.find(id); if(record) record.destroy(options); return record; }, change:function(callbackOrParams){ if(_isFunc(callbackOrParams)){ return this.bind('change', callbackOrParams); } else { return this.publish('change', callbackOrParams); } }, fetch:function(callbackOrParams){ if(_isFunc(callbackOrParams)){ return this.bind('fetch', callbackOrParams); } else { return this.publish('fetch', callbackOrParams); } }, toJSON:function(){ return this.recordsValues(); }, fromJSON:function(objects){ if(!objects) return; if(typeof objects === 'string'){ objects = JSON.parse(objects); } if(_isArray(objects)){ var result = []; var i = 0, l = objects.length; var value; for(; i < l; i++){ value = objects[i]; result.push(new this(value)); } return result; } else { return new this(objects); } }, fromForm:function(selector){ var model = new this(); return model.fromForm(selector); }, recordsValues:function(){ var key, value; var result = []; var r = this.records; for(key in r){ value = r[key]; result.push(value); } return result; } }).include({ init:function(attrs){ this.modelName = _capitalize(this.__name__); if(attrs) this.load(attrs); this.guid = this.constructor.guid(); }, /** * It will load all values provied in attr object * into the record. * */ load:function(attr){ var key, value, prop; for (key in attr){ value = attr[key]; _isFunc(this[key]) ? this[key](value) : this[key] = value; } return this; }, reload:function(){ if(this.isNewRecord()) return this; var original = this.constructor.find(this.id); this.load(original.attributes()); //If we return this, wouldn't it be the same? return original; }, create:function(options){ options = options || {}; this.publish('beforeCreate',options); if(!this.id) this.id = this.guid; var record = this.duplicate(false); //TODO: this.collection.add(this.id) this.constructor.records[this.id] = record; this.constructor.crecords[this.guid] = record; var clone = record.clone(); clone.publish('create', options); clone.publish('change:create', options); return clone; }, update:function(options){ options = options || {}; this.publish('beforeUpdate',options); //TODO: this.collection.get(this.id); var record = this.constructor.records[this.id]; record.load(this.attributes()); var clone = record.clone(); this.publish('update', options); this.publish('change:update',options); return clone; }, save:function(options){ options = options || {}; //Validate unless told not to. if(options.validate !== false){ if(this.isInvalid()) this.publish('error',options); } this.publish('beforeSave'); var action = this.isNewRecord() ? 'create' : 'update'; var record = this[action](options); this.publish('save', options); return record; }, destroy:function(options){ options = options || {}; this.publish('beforeDestroy', options); //TODO: Move into ModelCollection. 
delete this.constructor.records[this.id]; delete this.constructor.crecords[this.guid]; this.destroyed = true; this.publish('destroy', options); this.publish('change::destroy', options); // this.unbind(); return this; }, duplicate:function(asNewRecord){ var result = new this.constructor(this.attributes()); if(asNewRecord === false) result.guid = this.guid; else delete result.id; return result; }, clone:function(){ return _createObject(this); }, attributes:function(){ var key; var attrs = this.constructor.attributes; var i = 0, l = attrs.length, result = {}; for(; i < l; i++ ){ key = attrs[i]; if( key in this) result[key] = _result(this, key); } if(this.id) result.id = this.id; return result; }, //TODO: Check that name is in accepted attrs. updateAttribute:function(name, value, options){ var old = this[name]; this[name] = value; //TODO: update:name to follow conventions. this.publish('update'+_capitalize(name),{old:old, value:value},options); return this.save(options); }, updateAttributes:function(values, options){ //TODO: Should we only do this if we have subscribers? //if(this.willPublish('updateAttributes')) var old = this.attributes(); this.load(values); //TODO: update:all?attributes this.publish('updateAttributes',{old:old, values:values},options); return this.save(options); }, isNewRecord:function(){ return ! this.isRecord(); }, isEqual:function(record){ if(!record) return false; if(record.constructor !== this.constructor) return false; if(record.guid !== this.guid) return false; if(record.id !== this.id) return false; return true; }, isValid:function(){ return this.validate(); }, isInvalid:function(){ return ! this.validate(); }, isRecord:function(){ //TODO: this.collection.has(this.id); return this.id && this.id in this.constructor.records; }, toString:function(){ return '['+this.__name__+' => '+" ]"; //return "<" + this.constructor.className + " (" + (JSON.stringify(this)) + ")>"; }, toJSON:function(){ return this.attributes(); }, fromJSON:function(records){ return this.load(records); } }); Model.prototype.validate = function(){ console.log('Implement validate'); }; Model.prototype.metadata = function(meta){ }; Model.prototype.errors = function(){ }; /** * * */ Model.prototype.fromForm = function(selector, keyModifier){ var inputs = $(selector).serializeArray(); var i = 0, l = inputs.length; var name; keyModifier = keyModifier || new RegExp("(^"+this.modelName+"\[)(\w+)(\]$)"); for(; i < l; i++){ key = inputs[i]; name = key.name.replace(keyModifier, "$2"); result[key.name] = key.value; } }; namespace[exportName] = Model; }).call(this);
Model WIP.
src/model.js
Model WIP.
<ide><path>rc/model.js <ide> if(this.isNewRecord()) <ide> return this; <ide> <add> //TODO: load clean.attributes instead. <ide> var original = this.constructor.find(this.id); <ide> this.load(original.attributes()); <ide> <ide> //If we return this, wouldn't it be the same? <ide> return original; <del> }, <del> create:function(options){ <del> options = options || {}; <del> this.publish('beforeCreate',options); <del> <del> if(!this.id) this.id = this.guid; <del> <del> var record = this.duplicate(false); <del> <del> //TODO: this.collection.add(this.id) <del> this.constructor.records[this.id] = record; <del> this.constructor.crecords[this.guid] = record; <del> <del> var clone = record.clone(); <del> clone.publish('create', options); <del> clone.publish('change:create', options); <del> <del> return clone; <del> }, <del> update:function(options){ <del> options = options || {}; <del> this.publish('beforeUpdate',options); <del> <del> //TODO: this.collection.get(this.id); <del> var record = this.constructor.records[this.id]; <del> record.load(this.attributes()); <del> <del> var clone = record.clone(); <del> <del> this.publish('update', options); <del> this.publish('change:update',options); <del> <del> return clone; <ide> }, <ide> save:function(options){ <ide> options = options || {}; <ide> this.publish('save', options); <ide> <ide> return record; <add> }, <add> create:function(options){ <add> options = options || {}; <add> this.publish('beforeCreate',options); <add> <add> if(!this.id) this.id = this.guid; <add> <add> var record = this.duplicate(false); <add> <add> //TODO: this.collection.add(this.id) <add> this.constructor.records[this.id] = record; <add> this.constructor.crecords[this.guid] = record; <add> <add> var clone = record.clone(); <add> clone.publish('create', options); <add> clone.publish('change:create', options); <add> <add> return clone; <add> }, <add> update:function(options){ <add> options = options || {}; <add> this.publish('beforeUpdate',options); <add> <add> //TODO: this.collection.get(this.id); <add> var record = this.constructor.records[this.id]; <add> record.load(this.attributes()); <add> <add> var clone = record.clone(); <add> <add> this.publish('update', options); <add> this.publish('change:update',options); <add> <add> return clone; <ide> }, <ide> destroy:function(options){ <ide> options = options || {};
JavaScript
apache-2.0
547e495f6decdab93ee9e1707a26d37f627aabe5
0
llaske/sugarizer,llaske/sugarizer,llaske/sugarizer
define(['sugar-web/activity/activity', "webL10n", 'activity/Board', 'activity/vanilla-state', 'activity/patterns', 'activity/shadeColor'], function (activity, l10n, Board, State, patterns, shadeColor) { require(['domReady!'], function (doc) { activity.setup(); window.addEventListener('localized', function () { activity.getXOColor(function (err, color) { var dataStore = activity.getDatastoreObject(); main(Board, State, patterns, color, shadeColor, l10n, dataStore); }); }); }); }); function main(Board, State, patterns, color, shadeColor, l10n, dataStore) { var state = new State({ boardState: [], generation: 0, playPauseIcon: 'play', shouldPlay: false }); var randomPattern = patterns[0], gliderPattern = patterns[1], noPattern = patterns[2], blankPattern = patterns[3]; var target = document.querySelector('.main canvas'); var board = new Board(state.state.boardState, color.fill, '#FBF6F5', shadeColor(color.stroke, 10), color.stroke, 12, 12, 2, 2, target); document.querySelector('.generation-count').style.color = color.fill; document.querySelector('.generation-status').style.color = color.fill; document.querySelector('.generation-status').innerText = l10n.get('Generation'); board.draw(); var storeLocally = function storeLocally(state) { dataStore.setDataAsText({ boardState: state.boardState, generation: state.generation }); console.log('writing'); dataStore.save(function (err) { if (err) { console.log('writing failed.'); console.error(err); } else { console.log('writing saved.'); } }); }; var generateGeneration = function generateGeneration() { if (state.state.shouldPlay) { var nextGenerationBoard = state.state.boardState.map(function (row, y) { return row.map(function (cell, x) { return transformCellByRule(cell, findNeighbours(x, y)); }); }); state.set(function (prev) { return { boardState: nextGenerationBoard, generation: prev.generation + 1 }; }); setTimeout(generateGeneration, 100); } else { return 0; } }; var transformCellByRule = function transformCellByRule(cell, neighbours) { var cellIsAlive = cell === 1 || cell === 2; var aliveInNeighbour = neighbours.filter(function (cell) { return cell === 1 || cell === 2; }); var numOfAliveNeighbours = aliveInNeighbour.length; if (cellIsAlive) { if (numOfAliveNeighbours < 2) { return 0; } else if (numOfAliveNeighbours === 2 || numOfAliveNeighbours === 3) { return 1; } else { return 0; } } else { if (numOfAliveNeighbours === 3) { return 2; } else { return 0; } } }; var findNeighbours = function findNeighbours(x, y) { var leftX = x - 1 === -1 ? 49 : x - 1; var rightX = x + 1 === 50 ? 0 : x + 1; var topY = y - 1 === -1 ? 29 : y - 1; var bottomY = y + 1 === 30 ? 
0 : y + 1; var boardState = state.state.boardState; var left = boardState[y][leftX]; var right = boardState[y][rightX]; var top = boardState[topY][x]; var bottom = boardState[bottomY][x]; var leftTop = boardState[topY][leftX]; var leftBottom = boardState[bottomY][leftX]; var rightTop = boardState[topY][rightX]; var rightBottom = boardState[bottomY][rightX]; return [left, right, top, bottom, leftTop, leftBottom, rightTop, rightBottom]; }; state.subscribe({ boardState: ['.fake-selector', function (fakeElem, value, prevValue) { board.update(value); }], generation: ['.generation-count', 'innerText'], playPauseIcon: ['#play-pause', function (elem, value, prevValue) { elem.className = value + ' toolbutton'; }], shouldPlay: ['.fake-selector', function (fakeElem, value, prevValue) { if (value) { generateGeneration(); } }] }); dataStore.loadAsText(function (err, metadata, data) { console.log(data) var boardState = (data && data.state) ? data.state.boardState : randomPattern(); var generation = (data && data.state) ? data.state.generation : 0 state.set({ boardState: JSON.parse(boardState), generation: parseInt(generation) }); }); board.onClick(function (cellX, cellY) { state.set(function (prev) { var newState = [].concat(prev.boardState); newState[cellY][cellX] = 2; return { boardState: newState }; }); }); document.querySelector('#play-pause').addEventListener('click', function () { state.set(function (prev) { var iconToSet = prev.playPauseIcon === 'play' ? 'pause' : 'play'; var togglePlay = prev.shouldPlay === true ? false : true; if (prev.shouldPlay) { storeLocally({ state: state.state.boardState, generation: state.state.generation }); } return { playPauseIcon: iconToSet, shouldPlay: togglePlay }; }); }); document.querySelector('#random').addEventListener('click', function () { state.set({ boardState: randomPattern(), generation: 0 }); }); document.querySelector('#glider').addEventListener('click', function () { state.set({ boardState: glider(), generation: 0 }); }); document.querySelector('#no').addEventListener('click', function () { state.set({ boardState: no(), generation: 0 }); }); document.querySelector('#clear').addEventListener('click', function () { state.set({ boardState: blankPattern(), generation: 0, playPauseIcon: 'play', shouldPlay: false }); }); window.addEventListener('resize', function (e) { board.handleResize(window.innerWidth, state.state.boardState); }); board.handleResize(window.innerWidth, state.state.boardState); }
activities/GameOfLife.activity/js/activity.js
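The update logic in the activity above encodes each cell as 0 (dead), 1 (alive from a previous generation), or 2 (newly born or toggled by a click), and transformCellByRule treats both 1 and 2 as alive when applying Conway's rules. The following is a minimal standalone sketch of that rule, with an isAlive helper and sample values introduced here purely for illustration.

// Minimal sketch of the survival/birth rule used above, assumed standalone here.
// Cell states: 0 = dead, 1 = surviving cell, 2 = newly born cell.
var isAlive = function (cell) { return cell === 1 || cell === 2; };

var transformCellByRule = function (cell, neighbours) {
  var aliveNeighbours = neighbours.filter(isAlive).length;
  if (isAlive(cell)) {
    // Survival: exactly 2 or 3 live neighbours keeps the cell alive (as state 1).
    return (aliveNeighbours === 2 || aliveNeighbours === 3) ? 1 : 0;
  }
  // Birth: a dead cell with exactly 3 live neighbours comes alive as state 2.
  return aliveNeighbours === 3 ? 2 : 0;
};

// Hypothetical example values: a live cell with two live neighbours survives (-> 1),
// a dead cell with three live neighbours is born (-> 2).
console.log(transformCellByRule(1, [1, 2, 0, 0, 0, 0, 0, 0])); // 1
console.log(transformCellByRule(0, [1, 1, 2, 0, 0, 0, 0, 0])); // 2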
define(['sugar-web/activity/activity', "webL10n", 'activity/Board', 'activity/vanilla-state', 'activity/patterns', 'activity/shadeColor'], function (activity, l10n, Board, State, patterns, shadeColor) { require(['domReady!'], function (doc) { activity.setup(); window.addEventListener('localized', function () { activity.getXOColor(function (err, color) { var dataStore = activity.getDatastoreObject(); main(Board, State, patterns, color, shadeColor, l10n, dataStore); }); }); }); }); function main(Board, State, patterns, color, shadeColor, l10n, dataStore) { var state = new State({ boardState: [], generation: 0, playPauseIcon: 'play', shouldPlay: false }); var randomPattern = patterns[0], gliderPattern = patterns[1], noPattern = patterns[2], blankPattern = patterns[3]; var target = document.querySelector('.main canvas'); var board = new Board(state.state.boardState, color.fill, '#FBF6F5', shadeColor(color.stroke, 10), color.stroke, 12, 12, 2, 2, target); document.querySelector('.generation-count').style.color = color.fill; document.querySelector('.generation-status').style.color = color.fill; document.querySelector('.generation-status').innerText = l10n.get('Generation'); board.draw(); var storeLocally = function storeLocally(state) { dataStore.setDataAsText({ boardState: state.boarState, generation: state.generation }); console.log('writing'); dataStore.save(function (err) { if (err) { console.log('writing failed.'); console.error(err); } else { console.log('writing saved.'); } }); }; var generateGeneration = function generateGeneration() { if (state.state.shouldPlay) { var nextGenerationBoard = state.state.boardState.map(function (row, y) { return row.map(function (cell, x) { return transformCellByRule(cell, findNeighbours(x, y)); }); }); state.set(function (prev) { return { boardState: nextGenerationBoard, generation: prev.generation + 1 }; }); setTimeout(generateGeneration, 100); } else { return 0; } }; var transformCellByRule = function transformCellByRule(cell, neighbours) { var cellIsAlive = cell === 1 || cell === 2; var aliveInNeighbour = neighbours.filter(function (cell) { return cell === 1 || cell === 2; }); var numOfAliveNeighbours = aliveInNeighbour.length; if (cellIsAlive) { if (numOfAliveNeighbours < 2) { return 0; } else if (numOfAliveNeighbours === 2 || numOfAliveNeighbours === 3) { return 1; } else { return 0; } } else { if (numOfAliveNeighbours === 3) { return 2; } else { return 0; } } }; var findNeighbours = function findNeighbours(x, y) { var leftX = x - 1 === -1 ? 49 : x - 1; var rightX = x + 1 === 50 ? 0 : x + 1; var topY = y - 1 === -1 ? 29 : y - 1; var bottomY = y + 1 === 30 ? 
0 : y + 1; var boardState = state.state.boardState; var left = boardState[y][leftX]; var right = boardState[y][rightX]; var top = boardState[topY][x]; var bottom = boardState[bottomY][x]; var leftTop = boardState[topY][leftX]; var leftBottom = boardState[bottomY][leftX]; var rightTop = boardState[topY][rightX]; var rightBottom = boardState[bottomY][rightX]; return [left, right, top, bottom, leftTop, leftBottom, rightTop, rightBottom]; }; state.subscribe({ boardState: ['.fake-selector', function (fakeElem, value, prevValue) { board.update(value); }], generation: ['.generation-count', 'innerText'], playPauseIcon: ['#play-pause', function (elem, value, prevValue) { elem.className = value + ' toolbutton'; }], shouldPlay: ['.fake-selector', function (fakeElem, value, prevValue) { if (value) { generateGeneration(); } }] }); dataStore.loadAsText(function (err, metadata, data) { console.log(data) var boardState = (data && data.state) ? data.state.boardState : randomPattern(); var generation = (data && data.state) ? data.state.generation : 0 state.set({ boardState: JSON.parse(boardState), generation: parseInt(generation) }); }); board.onClick(function (cellX, cellY) { state.set(function (prev) { var newState = [].concat(prev.boardState); newState[cellY][cellX] = 2; return { boardState: newState }; }); }); document.querySelector('#play-pause').addEventListener('click', function () { state.set(function (prev) { var iconToSet = prev.playPauseIcon === 'play' ? 'pause' : 'play'; var togglePlay = prev.shouldPlay === true ? false : true; if (prev.shouldPlay) { storeLocally({ state: state.state.boardState, generation: state.state.generation }); } return { playPauseIcon: iconToSet, shouldPlay: togglePlay }; }); }); document.querySelector('#random').addEventListener('click', function () { state.set({ boardState: randomPattern(), generation: 0 }); }); document.querySelector('#glider').addEventListener('click', function () { state.set({ boardState: glider(), generation: 0 }); }); document.querySelector('#no').addEventListener('click', function () { state.set({ boardState: no(), generation: 0 }); }); document.querySelector('#clear').addEventListener('click', function () { state.set({ boardState: blankPattern(), generation: 0, playPauseIcon: 'play', shouldPlay: false }); }); window.addEventListener('resize', function (e) { board.handleResize(window.innerWidth, state.state.boardState); }); board.handleResize(window.innerWidth, state.state.boardState); }
fixed typo
activities/GameOfLife.activity/js/activity.js
fixed typo
<ide><path>activities/GameOfLife.activity/js/activity.js <ide> <ide> var storeLocally = function storeLocally(state) { <ide> dataStore.setDataAsText({ <del> boardState: state.boarState, <add> boardState: state.boardState, <ide> generation: state.generation <ide> }); <ide> console.log('writing');
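The diff above only renames state.boarState to state.boardState inside storeLocally, but the effect matters: reading a misspelled property on a plain object yields undefined, so the datastore entry would have been written without any board data. Below is a rough sketch of that failure mode; the setDataAsText function is stubbed out here purely for illustration and is not the real Sugar datastore call.

// Hedged sketch of why the one-character typo mattered; setDataAsText is a stand-in.
var setDataAsText = function (data) { console.log(JSON.stringify(data)); };

var state = { boardState: [[0, 1], [1, 0]], generation: 3 };

// Before the fix: the misspelled property reads as undefined, so the board is dropped.
setDataAsText({ boardState: state.boarState, generation: state.generation });
// {"generation":3}   (undefined values are omitted by JSON.stringify)

// After the fix: the board contents are persisted alongside the generation count.
setDataAsText({ boardState: state.boardState, generation: state.generation });
// {"boardState":[[0,1],[1,0]],"generation":3}

Note that in the surrounding activity code the caller passes an object shaped { state: ..., generation: ... }, so the persisted boardState may still come out undefined even after this rename; the sketch above defines its own flat state object only to isolate the typo's effect.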
Java
mit
b5f1b3dffc9c1bc4cf91a96528fc17a4942458b6
0
janicduplessis/react-native,facebook/react-native,janicduplessis/react-native,janicduplessis/react-native,javache/react-native,janicduplessis/react-native,javache/react-native,javache/react-native,facebook/react-native,javache/react-native,facebook/react-native,facebook/react-native,javache/react-native,javache/react-native,javache/react-native,janicduplessis/react-native,facebook/react-native,javache/react-native,janicduplessis/react-native,facebook/react-native,facebook/react-native,javache/react-native,janicduplessis/react-native,janicduplessis/react-native,facebook/react-native,facebook/react-native
/* * Copyright (c) Meta Platforms, Inc. and affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. */ package com.facebook.react.animated; import android.util.SparseArray; import androidx.annotation.Nullable; import androidx.annotation.UiThread; import com.facebook.common.logging.FLog; import com.facebook.react.bridge.Arguments; import com.facebook.react.bridge.Callback; import com.facebook.react.bridge.JSApplicationCausedNativeException; import com.facebook.react.bridge.JSApplicationIllegalArgumentException; import com.facebook.react.bridge.ReactApplicationContext; import com.facebook.react.bridge.ReactNoCrashSoftException; import com.facebook.react.bridge.ReactSoftExceptionLogger; import com.facebook.react.bridge.ReadableArray; import com.facebook.react.bridge.ReadableMap; import com.facebook.react.bridge.UIManager; import com.facebook.react.bridge.UiThreadUtil; import com.facebook.react.bridge.WritableMap; import com.facebook.react.modules.core.DeviceEventManagerModule; import com.facebook.react.uimanager.UIManagerHelper; import com.facebook.react.uimanager.common.UIManagerType; import com.facebook.react.uimanager.events.Event; import com.facebook.react.uimanager.events.EventDispatcher; import com.facebook.react.uimanager.events.EventDispatcherListener; import java.util.ArrayDeque; import java.util.ArrayList; import java.util.HashMap; import java.util.LinkedList; import java.util.List; import java.util.ListIterator; import java.util.Map; import java.util.Queue; /** * This is the main class that coordinates how native animated JS implementation drives UI changes. * * <p>It implements a management interface for animated nodes graph as well as implements a graph * traversal algorithm that is run for each animation frame. * * <p>For each animation frame we visit animated nodes that might've been updated as well as their * children that may use parent's values to update themselves. At the end of the traversal algorithm * we expect to reach a special type of the node: PropsAnimatedNode that is then responsible for * calculating property map which can be sent to native view hierarchy to update the view. * * <p>IMPORTANT: This class should be accessed only from the UI Thread */ public class NativeAnimatedNodesManager implements EventDispatcherListener { private static final String TAG = "NativeAnimatedNodesManager"; private final SparseArray<AnimatedNode> mAnimatedNodes = new SparseArray<>(); private final SparseArray<AnimationDriver> mActiveAnimations = new SparseArray<>(); private final SparseArray<AnimatedNode> mUpdatedNodes = new SparseArray<>(); // Mapping of a view tag and an event name to a list of event animation drivers. 99% of the time // there will be only one driver per mapping so all code code should be optimized around that. private final Map<String, List<EventAnimationDriver>> mEventDrivers = new HashMap<>(); private final ReactApplicationContext mReactApplicationContext; private int mAnimatedGraphBFSColor = 0; // Used to avoid allocating a new array on every frame in `runUpdates` and `onEventDispatch`. 
private final List<AnimatedNode> mRunUpdateNodeList = new LinkedList<>(); private boolean mEventListenerInitializedForFabric = false; private boolean mEventListenerInitializedForNonFabric = false; private boolean mWarnedAboutGraphTraversal = false; public NativeAnimatedNodesManager(ReactApplicationContext reactApplicationContext) { mReactApplicationContext = reactApplicationContext; } /** * Initialize event listeners for Fabric UIManager or non-Fabric UIManager, exactly once. Once * Fabric is the only UIManager, this logic can be simplified. This is expected to only be called * from the native module thread. * * @param uiManagerType */ public void initializeEventListenerForUIManagerType(@UIManagerType final int uiManagerType) { if (uiManagerType == UIManagerType.FABRIC ? mEventListenerInitializedForFabric : mEventListenerInitializedForNonFabric) { return; } UIManager uiManager = UIManagerHelper.getUIManager(mReactApplicationContext, uiManagerType); if (uiManager != null) { uiManager.<EventDispatcher>getEventDispatcher().addListener(this); if (uiManagerType == UIManagerType.FABRIC) { mEventListenerInitializedForFabric = true; } else { mEventListenerInitializedForNonFabric = true; } } } @Nullable public AnimatedNode getNodeById(int id) { return mAnimatedNodes.get(id); } public boolean hasActiveAnimations() { return mActiveAnimations.size() > 0 || mUpdatedNodes.size() > 0; } @UiThread public void createAnimatedNode(int tag, ReadableMap config) { if (mAnimatedNodes.get(tag) != null) { throw new JSApplicationIllegalArgumentException( "createAnimatedNode: Animated node [" + tag + "] already exists"); } String type = config.getString("type"); final AnimatedNode node; if ("style".equals(type)) { node = new StyleAnimatedNode(config, this); } else if ("value".equals(type)) { node = new ValueAnimatedNode(config); } else if ("color".equals(type)) { node = new ColorAnimatedNode(config, this, mReactApplicationContext); } else if ("props".equals(type)) { node = new PropsAnimatedNode(config, this); } else if ("interpolation".equals(type)) { node = new InterpolationAnimatedNode(config); } else if ("addition".equals(type)) { node = new AdditionAnimatedNode(config, this); } else if ("subtraction".equals(type)) { node = new SubtractionAnimatedNode(config, this); } else if ("division".equals(type)) { node = new DivisionAnimatedNode(config, this); } else if ("multiplication".equals(type)) { node = new MultiplicationAnimatedNode(config, this); } else if ("modulus".equals(type)) { node = new ModulusAnimatedNode(config, this); } else if ("diffclamp".equals(type)) { node = new DiffClampAnimatedNode(config, this); } else if ("transform".equals(type)) { node = new TransformAnimatedNode(config, this); } else if ("tracking".equals(type)) { node = new TrackingAnimatedNode(config, this); } else { throw new JSApplicationIllegalArgumentException("Unsupported node type: " + type); } node.mTag = tag; mAnimatedNodes.put(tag, node); mUpdatedNodes.put(tag, node); } @UiThread public void updateAnimatedNodeConfig(int tag, ReadableMap config) { AnimatedNode node = mAnimatedNodes.get(tag); if (node == null) { throw new JSApplicationIllegalArgumentException( "updateAnimatedNode: Animated node [" + tag + "] does not exist"); } if (node instanceof AnimatedNodeWithUpdateableConfig) { stopAnimationsForNode(node); ((AnimatedNodeWithUpdateableConfig) node).onUpdateConfig(config); mUpdatedNodes.put(tag, node); } } @UiThread public void dropAnimatedNode(int tag) { mAnimatedNodes.remove(tag); mUpdatedNodes.remove(tag); } @UiThread public 
void startListeningToAnimatedNodeValue(int tag, AnimatedNodeValueListener listener) { AnimatedNode node = mAnimatedNodes.get(tag); if (node == null || !(node instanceof ValueAnimatedNode)) { throw new JSApplicationIllegalArgumentException( "startListeningToAnimatedNodeValue: Animated node [" + tag + "] does not exist, or is not a 'value' node"); } ((ValueAnimatedNode) node).setValueListener(listener); } @UiThread public void stopListeningToAnimatedNodeValue(int tag) { AnimatedNode node = mAnimatedNodes.get(tag); if (node == null || !(node instanceof ValueAnimatedNode)) { throw new JSApplicationIllegalArgumentException( "startListeningToAnimatedNodeValue: Animated node [" + tag + "] does not exist, or is not a 'value' node"); } ((ValueAnimatedNode) node).setValueListener(null); } @UiThread public void setAnimatedNodeValue(int tag, double value) { AnimatedNode node = mAnimatedNodes.get(tag); if (node == null || !(node instanceof ValueAnimatedNode)) { throw new JSApplicationIllegalArgumentException( "setAnimatedNodeValue: Animated node [" + tag + "] does not exist, or is not a 'value' node"); } stopAnimationsForNode(node); ((ValueAnimatedNode) node).mValue = value; mUpdatedNodes.put(tag, node); } @UiThread public void setAnimatedNodeOffset(int tag, double offset) { AnimatedNode node = mAnimatedNodes.get(tag); if (node == null || !(node instanceof ValueAnimatedNode)) { throw new JSApplicationIllegalArgumentException( "setAnimatedNodeOffset: Animated node [" + tag + "] does not exist, or is not a 'value' node"); } ((ValueAnimatedNode) node).mOffset = offset; mUpdatedNodes.put(tag, node); } @UiThread public void flattenAnimatedNodeOffset(int tag) { AnimatedNode node = mAnimatedNodes.get(tag); if (node == null || !(node instanceof ValueAnimatedNode)) { throw new JSApplicationIllegalArgumentException( "flattenAnimatedNodeOffset: Animated node [" + tag + "] does not exist, or is not a 'value' node"); } ((ValueAnimatedNode) node).flattenOffset(); } @UiThread public void extractAnimatedNodeOffset(int tag) { AnimatedNode node = mAnimatedNodes.get(tag); if (node == null || !(node instanceof ValueAnimatedNode)) { throw new JSApplicationIllegalArgumentException( "extractAnimatedNodeOffset: Animated node [" + tag + "] does not exist, or is not a 'value' node"); } ((ValueAnimatedNode) node).extractOffset(); } @UiThread public void startAnimatingNode( int animationId, int animatedNodeTag, ReadableMap animationConfig, Callback endCallback) { AnimatedNode node = mAnimatedNodes.get(animatedNodeTag); if (node == null) { throw new JSApplicationIllegalArgumentException( "startAnimatingNode: Animated node [" + animatedNodeTag + "] does not exist"); } if (!(node instanceof ValueAnimatedNode)) { throw new JSApplicationIllegalArgumentException( "startAnimatingNode: Animated node [" + animatedNodeTag + "] should be of type " + ValueAnimatedNode.class.getName()); } final AnimationDriver existingDriver = mActiveAnimations.get(animationId); if (existingDriver != null) { // animation with the given ID is already running, we need to update its configuration instead // of spawning a new one existingDriver.resetConfig(animationConfig); return; } String type = animationConfig.getString("type"); final AnimationDriver animation; if ("frames".equals(type)) { animation = new FrameBasedAnimationDriver(animationConfig); } else if ("spring".equals(type)) { animation = new SpringAnimation(animationConfig); } else if ("decay".equals(type)) { animation = new DecayAnimation(animationConfig); } else { throw new 
JSApplicationIllegalArgumentException( "startAnimatingNode: Unsupported animation type [" + animatedNodeTag + "]: " + type); } animation.mId = animationId; animation.mEndCallback = endCallback; animation.mAnimatedValue = (ValueAnimatedNode) node; mActiveAnimations.put(animationId, animation); } @UiThread private void stopAnimationsForNode(AnimatedNode animatedNode) { // in most of the cases there should never be more than a few active animations running at the // same time. Therefore it does not make much sense to create an animationId -> animation // object map that would require additional memory just to support the use-case of stopping // an animation for (int i = 0; i < mActiveAnimations.size(); i++) { AnimationDriver animation = mActiveAnimations.valueAt(i); if (animatedNode.equals(animation.mAnimatedValue)) { if (animation.mEndCallback != null) { // Invoke animation end callback with {finished: false} WritableMap endCallbackResponse = Arguments.createMap(); endCallbackResponse.putBoolean("finished", false); animation.mEndCallback.invoke(endCallbackResponse); } else if (mReactApplicationContext != null) { // If no callback is passed in, this /may/ be an animation set up by the single-op // instruction from JS, meaning that no jsi::functions are passed into native and // we communicate via RCTDeviceEventEmitter instead of callbacks. WritableMap params = Arguments.createMap(); params.putInt("animationId", animation.mId); params.putBoolean("finished", false); mReactApplicationContext .getJSModule(DeviceEventManagerModule.RCTDeviceEventEmitter.class) .emit("onNativeAnimatedModuleAnimationFinished", params); } mActiveAnimations.removeAt(i); i--; } } } @UiThread public void stopAnimation(int animationId) { // in most of the cases there should never be more than a few active animations running at the // same time. Therefore it does not make much sense to create an animationId -> animation // object map that would require additional memory just to support the use-case of stopping // an animation for (int i = 0; i < mActiveAnimations.size(); i++) { AnimationDriver animation = mActiveAnimations.valueAt(i); if (animation.mId == animationId) { if (animation.mEndCallback != null) { // Invoke animation end callback with {finished: false} WritableMap endCallbackResponse = Arguments.createMap(); endCallbackResponse.putBoolean("finished", false); animation.mEndCallback.invoke(endCallbackResponse); } else if (mReactApplicationContext != null) { // If no callback is passed in, this /may/ be an animation set up by the single-op // instruction from JS, meaning that no jsi::functions are passed into native and // we communicate via RCTDeviceEventEmitter instead of callbacks. WritableMap params = Arguments.createMap(); params.putInt("animationId", animation.mId); params.putBoolean("finished", false); mReactApplicationContext .getJSModule(DeviceEventManagerModule.RCTDeviceEventEmitter.class) .emit("onNativeAnimatedModuleAnimationFinished", params); } mActiveAnimations.removeAt(i); return; } } // Do not throw an error in the case animation could not be found. We only keep "active" // animations in the registry and there is a chance that Animated.js will enqueue a // stopAnimation call after the animation has ended or the call will reach native thread only // when the animation is already over. 
} @UiThread public void connectAnimatedNodes(int parentNodeTag, int childNodeTag) { AnimatedNode parentNode = mAnimatedNodes.get(parentNodeTag); if (parentNode == null) { throw new JSApplicationIllegalArgumentException( "connectAnimatedNodes: Animated node with tag (parent) [" + parentNodeTag + "] does not exist"); } AnimatedNode childNode = mAnimatedNodes.get(childNodeTag); if (childNode == null) { throw new JSApplicationIllegalArgumentException( "connectAnimatedNodes: Animated node with tag (child) [" + childNodeTag + "] does not exist"); } parentNode.addChild(childNode); mUpdatedNodes.put(childNodeTag, childNode); } public void disconnectAnimatedNodes(int parentNodeTag, int childNodeTag) { AnimatedNode parentNode = mAnimatedNodes.get(parentNodeTag); if (parentNode == null) { throw new JSApplicationIllegalArgumentException( "disconnectAnimatedNodes: Animated node with tag (parent) [" + parentNodeTag + "] does not exist"); } AnimatedNode childNode = mAnimatedNodes.get(childNodeTag); if (childNode == null) { throw new JSApplicationIllegalArgumentException( "disconnectAnimatedNodes: Animated node with tag (child) [" + childNodeTag + "] does not exist"); } parentNode.removeChild(childNode); mUpdatedNodes.put(childNodeTag, childNode); } @UiThread public void connectAnimatedNodeToView(int animatedNodeTag, int viewTag) { AnimatedNode node = mAnimatedNodes.get(animatedNodeTag); if (node == null) { throw new JSApplicationIllegalArgumentException( "connectAnimatedNodeToView: Animated node with tag [" + animatedNodeTag + "] does not exist"); } if (!(node instanceof PropsAnimatedNode)) { throw new JSApplicationIllegalArgumentException( "connectAnimatedNodeToView: Animated node connected to view [" + viewTag + "] should be of type " + PropsAnimatedNode.class.getName()); } if (mReactApplicationContext == null) { throw new IllegalStateException( "connectAnimatedNodeToView: Animated node could not be connected, no ReactApplicationContext: " + viewTag); } @Nullable UIManager uiManager = UIManagerHelper.getUIManagerForReactTag(mReactApplicationContext, viewTag); if (uiManager == null) { ReactSoftExceptionLogger.logSoftException( TAG, new ReactNoCrashSoftException( "connectAnimatedNodeToView: Animated node could not be connected to UIManager - uiManager disappeared for tag: " + viewTag)); return; } PropsAnimatedNode propsAnimatedNode = (PropsAnimatedNode) node; propsAnimatedNode.connectToView(viewTag, uiManager); mUpdatedNodes.put(animatedNodeTag, node); } @UiThread public void disconnectAnimatedNodeFromView(int animatedNodeTag, int viewTag) { AnimatedNode node = mAnimatedNodes.get(animatedNodeTag); if (node == null) { throw new JSApplicationIllegalArgumentException( "disconnectAnimatedNodeFromView: Animated node with tag [" + animatedNodeTag + "] does not exist"); } if (!(node instanceof PropsAnimatedNode)) { throw new JSApplicationIllegalArgumentException( "disconnectAnimatedNodeFromView: Animated node connected to view [" + viewTag + "] should be of type " + PropsAnimatedNode.class.getName()); } PropsAnimatedNode propsAnimatedNode = (PropsAnimatedNode) node; propsAnimatedNode.disconnectFromView(viewTag); } @UiThread public void getValue(int tag, Callback callback) { AnimatedNode node = mAnimatedNodes.get(tag); if (node == null || !(node instanceof ValueAnimatedNode)) { throw new JSApplicationIllegalArgumentException( "getValue: Animated node with tag [" + tag + "] does not exist or is not a 'value' node"); } double value = ((ValueAnimatedNode) node).getValue(); if (callback != null) { 
callback.invoke(value); return; } // If there's no callback, that means that JS is using the single-operation mode, and not // passing any callbacks into Java. // See NativeAnimatedHelper.js for details. // Instead, we use RCTDeviceEventEmitter to pass data back to JS and emulate callbacks. if (mReactApplicationContext == null) { return; } WritableMap params = Arguments.createMap(); params.putInt("tag", tag); params.putDouble("value", value); mReactApplicationContext .getJSModule(DeviceEventManagerModule.RCTDeviceEventEmitter.class) .emit("onNativeAnimatedModuleGetValue", params); } @UiThread public void restoreDefaultValues(int animatedNodeTag) { AnimatedNode node = mAnimatedNodes.get(animatedNodeTag); // Restoring default values needs to happen before UIManager operations so it is // possible the node hasn't been created yet if it is being connected and // disconnected in the same batch. In that case we don't need to restore // default values since it will never actually update the view. if (node == null) { return; } if (!(node instanceof PropsAnimatedNode)) { throw new JSApplicationIllegalArgumentException( "Animated node connected to view [?] should be of type " + PropsAnimatedNode.class.getName()); } PropsAnimatedNode propsAnimatedNode = (PropsAnimatedNode) node; propsAnimatedNode.restoreDefaultValues(); } @UiThread public void addAnimatedEventToView(int viewTag, String eventName, ReadableMap eventMapping) { int nodeTag = eventMapping.getInt("animatedValueTag"); AnimatedNode node = mAnimatedNodes.get(nodeTag); if (node == null) { throw new JSApplicationIllegalArgumentException( "addAnimatedEventToView: Animated node with tag [" + nodeTag + "] does not exist"); } if (!(node instanceof ValueAnimatedNode)) { throw new JSApplicationIllegalArgumentException( "addAnimatedEventToView: Animated node on view [" + viewTag + "] connected to event (" + eventName + ") should be of type " + ValueAnimatedNode.class.getName()); } ReadableArray path = eventMapping.getArray("nativeEventPath"); List<String> pathList = new ArrayList<>(path.size()); for (int i = 0; i < path.size(); i++) { pathList.add(path.getString(i)); } EventAnimationDriver event = new EventAnimationDriver(pathList, (ValueAnimatedNode) node); String key = viewTag + eventName; if (mEventDrivers.containsKey(key)) { mEventDrivers.get(key).add(event); } else { List<EventAnimationDriver> drivers = new ArrayList<>(1); drivers.add(event); mEventDrivers.put(key, drivers); } } @UiThread public void removeAnimatedEventFromView(int viewTag, String eventName, int animatedValueTag) { String key = viewTag + eventName; if (mEventDrivers.containsKey(key)) { List<EventAnimationDriver> driversForKey = mEventDrivers.get(key); if (driversForKey.size() == 1) { mEventDrivers.remove(viewTag + eventName); } else { ListIterator<EventAnimationDriver> it = driversForKey.listIterator(); while (it.hasNext()) { if (it.next().mValueNode.mTag == animatedValueTag) { it.remove(); break; } } } } } @Override public void onEventDispatch(final Event event) { // Events can be dispatched from any thread so we have to make sure handleEvent is run from the // UI thread. if (UiThreadUtil.isOnUiThread()) { handleEvent(event); } else { UiThreadUtil.runOnUiThread( new Runnable() { @Override public void run() { handleEvent(event); } }); } } @UiThread private void handleEvent(Event event) { if (!mEventDrivers.isEmpty()) { // If the event has a different name in native convert it to it's JS name. 
// TODO T64216139 Remove dependency of UIManagerModule when the Constants are not in Native // anymore if (mReactApplicationContext == null) { return; } UIManager uiManager = UIManagerHelper.getUIManager(mReactApplicationContext, event.getUIManagerType()); if (uiManager == null) { return; } String eventName = uiManager.resolveCustomDirectEventName(event.getEventName()); if (eventName == null) { eventName = ""; } List<EventAnimationDriver> driversForKey = mEventDrivers.get(event.getViewTag() + eventName); if (driversForKey != null) { for (EventAnimationDriver driver : driversForKey) { stopAnimationsForNode(driver.mValueNode); event.dispatch(driver); mRunUpdateNodeList.add(driver.mValueNode); } updateNodes(mRunUpdateNodeList); mRunUpdateNodeList.clear(); } } } /** * Animation loop performs two BFSes over the graph of animated nodes. We use incremented {@code * mAnimatedGraphBFSColor} to mark nodes as visited in each of the BFSes which saves additional * loops for clearing "visited" states. * * <p>First BFS starts with nodes that are in {@code mUpdatedNodes} (that is, their value have * been modified from JS in the last batch of JS operations) or directly attached to an active * animation (hence linked to objects from {@code mActiveAnimations}). In that step we calculate * an attribute {@code mActiveIncomingNodes}. The second BFS runs in topological order over the * sub-graph of *active* nodes. This is done by adding node to the BFS queue only if all its * "predecessors" have already been visited. */ @UiThread public void runUpdates(long frameTimeNanos) { UiThreadUtil.assertOnUiThread(); boolean hasFinishedAnimations = false; for (int i = 0; i < mUpdatedNodes.size(); i++) { AnimatedNode node = mUpdatedNodes.valueAt(i); mRunUpdateNodeList.add(node); } // Clean mUpdatedNodes queue mUpdatedNodes.clear(); for (int i = 0; i < mActiveAnimations.size(); i++) { AnimationDriver animation = mActiveAnimations.valueAt(i); animation.runAnimationStep(frameTimeNanos); AnimatedNode valueNode = animation.mAnimatedValue; mRunUpdateNodeList.add(valueNode); if (animation.mHasFinished) { hasFinishedAnimations = true; } } updateNodes(mRunUpdateNodeList); mRunUpdateNodeList.clear(); // Cleanup finished animations. Iterate over the array of animations and override ones that has // finished, then resize `mActiveAnimations`. if (hasFinishedAnimations) { for (int i = mActiveAnimations.size() - 1; i >= 0; i--) { AnimationDriver animation = mActiveAnimations.valueAt(i); if (animation.mHasFinished) { if (animation.mEndCallback != null) { WritableMap endCallbackResponse = Arguments.createMap(); endCallbackResponse.putBoolean("finished", true); animation.mEndCallback.invoke(endCallbackResponse); } else if (mReactApplicationContext != null) { // If no callback is passed in, this /may/ be an animation set up by the single-op // instruction from JS, meaning that no jsi::functions are passed into native and // we communicate via RCTDeviceEventEmitter instead of callbacks. WritableMap params = Arguments.createMap(); params.putInt("animationId", animation.mId); params.putBoolean("finished", true); DeviceEventManagerModule.RCTDeviceEventEmitter eventEmitter = mReactApplicationContext.getJSModule( DeviceEventManagerModule.RCTDeviceEventEmitter.class); if (eventEmitter != null) { eventEmitter.emit("onNativeAnimatedModuleAnimationFinished", params); } } mActiveAnimations.removeAt(i); } } } } @UiThread private void updateNodes(List<AnimatedNode> nodes) { int activeNodesCount = 0; int updatedNodesCount = 0; // STEP 1. 
// BFS over graph of nodes. Update `mIncomingNodes` attribute for each node during that BFS. // Store number of visited nodes in `activeNodesCount`. We "execute" active animations as a part // of this step. mAnimatedGraphBFSColor++; /* use new color */ if (mAnimatedGraphBFSColor == AnimatedNode.INITIAL_BFS_COLOR) { // value "0" is used as an initial color for a new node, using it in BFS may cause some nodes // to be skipped. mAnimatedGraphBFSColor++; } Queue<AnimatedNode> nodesQueue = new ArrayDeque<>(); for (AnimatedNode node : nodes) { if (node.mBFSColor != mAnimatedGraphBFSColor) { node.mBFSColor = mAnimatedGraphBFSColor; activeNodesCount++; nodesQueue.add(node); } } while (!nodesQueue.isEmpty()) { AnimatedNode nextNode = nodesQueue.poll(); if (nextNode.mChildren != null) { for (int i = 0; i < nextNode.mChildren.size(); i++) { AnimatedNode child = nextNode.mChildren.get(i); child.mActiveIncomingNodes++; if (child.mBFSColor != mAnimatedGraphBFSColor) { child.mBFSColor = mAnimatedGraphBFSColor; activeNodesCount++; nodesQueue.add(child); } } } } // STEP 2 // BFS over the graph of active nodes in topological order -> visit node only when all its // "predecessors" in the graph have already been visited. It is important to visit nodes in that // order as they may often use values of their predecessors in order to calculate "next state" // of their own. We start by determining the starting set of nodes by looking for nodes with // `mActiveIncomingNodes = 0` (those can only be the ones that we start BFS in the previous // step). We store number of visited nodes in this step in `updatedNodesCount` mAnimatedGraphBFSColor++; if (mAnimatedGraphBFSColor == AnimatedNode.INITIAL_BFS_COLOR) { // see reasoning for this check a few lines above mAnimatedGraphBFSColor++; } // find nodes with zero "incoming nodes", those can be either nodes from `mUpdatedNodes` or // ones connected to active animations for (AnimatedNode node : nodes) { if (node.mActiveIncomingNodes == 0 && node.mBFSColor != mAnimatedGraphBFSColor) { node.mBFSColor = mAnimatedGraphBFSColor; updatedNodesCount++; nodesQueue.add(node); } } // Run main "update" loop int cyclesDetected = 0; while (!nodesQueue.isEmpty()) { AnimatedNode nextNode = nodesQueue.poll(); try { nextNode.update(); if (nextNode instanceof PropsAnimatedNode) { // Send property updates to native view manager ((PropsAnimatedNode) nextNode).updateView(); } } catch (JSApplicationCausedNativeException e) { // An exception is thrown if the view hasn't been created yet. This can happen because // views are created in batches. If this particular view didn't make it into a batch yet, // the view won't exist and an exception will be thrown when attempting to start an // animation on it. // // Eat the exception rather than crashing. The impact is that we may drop one or more // frames of the animation. 
FLog.e(TAG, "Native animation workaround, frame lost as result of race condition", e); } if (nextNode instanceof ValueAnimatedNode) { // Potentially send events to JS when the node's value is updated ((ValueAnimatedNode) nextNode).onValueUpdate(); } if (nextNode.mChildren != null) { for (int i = 0; i < nextNode.mChildren.size(); i++) { AnimatedNode child = nextNode.mChildren.get(i); child.mActiveIncomingNodes--; if (child.mBFSColor != mAnimatedGraphBFSColor && child.mActiveIncomingNodes == 0) { child.mBFSColor = mAnimatedGraphBFSColor; updatedNodesCount++; nodesQueue.add(child); } else if (child.mBFSColor == mAnimatedGraphBFSColor) { cyclesDetected++; } } } } // Verify that we've visited *all* active nodes. Throw otherwise as this could mean there is a // cycle in animated node graph, or that the graph is only partially set up. We also take // advantage of the fact that all active nodes are visited in the step above so that all the // nodes properties `mActiveIncomingNodes` are set to zero. // In Fabric there can be race conditions between the JS thread setting up or tearing down // animated nodes, and Fabric executing them on the UI thread, leading to temporary inconsistent // states. if (activeNodesCount != updatedNodesCount) { if (mWarnedAboutGraphTraversal) { return; } mWarnedAboutGraphTraversal = true; // Before crashing or logging soft exception, log details about current graph setup FLog.e(TAG, "Detected animation cycle or disconnected graph. "); for (AnimatedNode node : nodes) { FLog.e(TAG, node.prettyPrintWithChildren()); } // If we're running only in non-Fabric, we still throw an exception. // In Fabric, it seems that animations enter an inconsistent state fairly often. // We detect if the inconsistency is due to a cycle (a fatal error for which we must crash) // or disconnected regions, indicating a partially-set-up animation graph, which is not // fatal and can stay a warning. String reason = cyclesDetected > 0 ? "cycles (" + cyclesDetected + ")" : "disconnected regions"; IllegalStateException ex = new IllegalStateException( "Looks like animated nodes graph has " + reason + ", there are " + activeNodesCount + " but toposort visited only " + updatedNodesCount); if (mEventListenerInitializedForFabric && cyclesDetected == 0) { // TODO T71377544: investigate these SoftExceptions and see if we can remove entirely // or fix the root cause ReactSoftExceptionLogger.logSoftException(TAG, new ReactNoCrashSoftException(ex)); } else if (mEventListenerInitializedForFabric) { // TODO T71377544: investigate these SoftExceptions and see if we can remove entirely // or fix the root cause ReactSoftExceptionLogger.logSoftException(TAG, new ReactNoCrashSoftException(ex)); } else { throw ex; } } else { mWarnedAboutGraphTraversal = false; } } }
ReactAndroid/src/main/java/com/facebook/react/animated/NativeAnimatedNodesManager.java
/* * Copyright (c) Meta Platforms, Inc. and affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. */ package com.facebook.react.animated; import android.util.SparseArray; import androidx.annotation.Nullable; import androidx.annotation.UiThread; import com.facebook.common.logging.FLog; import com.facebook.react.bridge.Arguments; import com.facebook.react.bridge.Callback; import com.facebook.react.bridge.JSApplicationCausedNativeException; import com.facebook.react.bridge.JSApplicationIllegalArgumentException; import com.facebook.react.bridge.ReactApplicationContext; import com.facebook.react.bridge.ReactNoCrashSoftException; import com.facebook.react.bridge.ReactSoftExceptionLogger; import com.facebook.react.bridge.ReadableArray; import com.facebook.react.bridge.ReadableMap; import com.facebook.react.bridge.UIManager; import com.facebook.react.bridge.UiThreadUtil; import com.facebook.react.bridge.WritableMap; import com.facebook.react.modules.core.DeviceEventManagerModule; import com.facebook.react.uimanager.UIManagerHelper; import com.facebook.react.uimanager.common.UIManagerType; import com.facebook.react.uimanager.events.Event; import com.facebook.react.uimanager.events.EventDispatcher; import com.facebook.react.uimanager.events.EventDispatcherListener; import java.util.ArrayDeque; import java.util.ArrayList; import java.util.HashMap; import java.util.LinkedList; import java.util.List; import java.util.ListIterator; import java.util.Map; import java.util.Queue; /** * This is the main class that coordinates how native animated JS implementation drives UI changes. * * <p>It implements a management interface for animated nodes graph as well as implements a graph * traversal algorithm that is run for each animation frame. * * <p>For each animation frame we visit animated nodes that might've been updated as well as their * children that may use parent's values to update themselves. At the end of the traversal algorithm * we expect to reach a special type of the node: PropsAnimatedNode that is then responsible for * calculating property map which can be sent to native view hierarchy to update the view. * * <p>IMPORTANT: This class should be accessed only from the UI Thread */ public class NativeAnimatedNodesManager implements EventDispatcherListener { private static final String TAG = "NativeAnimatedNodesManager"; private final SparseArray<AnimatedNode> mAnimatedNodes = new SparseArray<>(); private final SparseArray<AnimationDriver> mActiveAnimations = new SparseArray<>(); private final SparseArray<AnimatedNode> mUpdatedNodes = new SparseArray<>(); // Mapping of a view tag and an event name to a list of event animation drivers. 99% of the time // there will be only one driver per mapping so all code code should be optimized around that. private final Map<String, List<EventAnimationDriver>> mEventDrivers = new HashMap<>(); private final ReactApplicationContext mReactApplicationContext; private int mAnimatedGraphBFSColor = 0; // Used to avoid allocating a new array on every frame in `runUpdates` and `onEventDispatch`. 
private final List<AnimatedNode> mRunUpdateNodeList = new LinkedList<>(); private boolean mEventListenerInitializedForFabric = false; private boolean mEventListenerInitializedForNonFabric = false; private boolean mWarnedAboutGraphTraversal = false; public NativeAnimatedNodesManager(ReactApplicationContext reactApplicationContext) { mReactApplicationContext = reactApplicationContext; } /** * Initialize event listeners for Fabric UIManager or non-Fabric UIManager, exactly once. Once * Fabric is the only UIManager, this logic can be simplified. This is expected to only be called * from the native module thread. * * @param uiManagerType */ public void initializeEventListenerForUIManagerType(@UIManagerType final int uiManagerType) { if (uiManagerType == UIManagerType.FABRIC ? mEventListenerInitializedForFabric : mEventListenerInitializedForNonFabric) { return; } UIManager uiManager = UIManagerHelper.getUIManager(mReactApplicationContext, uiManagerType); if (uiManager != null) { uiManager.<EventDispatcher>getEventDispatcher().addListener(this); if (uiManagerType == UIManagerType.FABRIC) { mEventListenerInitializedForFabric = true; } else { mEventListenerInitializedForNonFabric = true; } } } /*package*/ @Nullable AnimatedNode getNodeById(int id) { return mAnimatedNodes.get(id); } public boolean hasActiveAnimations() { return mActiveAnimations.size() > 0 || mUpdatedNodes.size() > 0; } @UiThread public void createAnimatedNode(int tag, ReadableMap config) { if (mAnimatedNodes.get(tag) != null) { throw new JSApplicationIllegalArgumentException( "createAnimatedNode: Animated node [" + tag + "] already exists"); } String type = config.getString("type"); final AnimatedNode node; if ("style".equals(type)) { node = new StyleAnimatedNode(config, this); } else if ("value".equals(type)) { node = new ValueAnimatedNode(config); } else if ("color".equals(type)) { node = new ColorAnimatedNode(config, this, mReactApplicationContext); } else if ("props".equals(type)) { node = new PropsAnimatedNode(config, this); } else if ("interpolation".equals(type)) { node = new InterpolationAnimatedNode(config); } else if ("addition".equals(type)) { node = new AdditionAnimatedNode(config, this); } else if ("subtraction".equals(type)) { node = new SubtractionAnimatedNode(config, this); } else if ("division".equals(type)) { node = new DivisionAnimatedNode(config, this); } else if ("multiplication".equals(type)) { node = new MultiplicationAnimatedNode(config, this); } else if ("modulus".equals(type)) { node = new ModulusAnimatedNode(config, this); } else if ("diffclamp".equals(type)) { node = new DiffClampAnimatedNode(config, this); } else if ("transform".equals(type)) { node = new TransformAnimatedNode(config, this); } else if ("tracking".equals(type)) { node = new TrackingAnimatedNode(config, this); } else { throw new JSApplicationIllegalArgumentException("Unsupported node type: " + type); } node.mTag = tag; mAnimatedNodes.put(tag, node); mUpdatedNodes.put(tag, node); } @UiThread public void updateAnimatedNodeConfig(int tag, ReadableMap config) { AnimatedNode node = mAnimatedNodes.get(tag); if (node == null) { throw new JSApplicationIllegalArgumentException( "updateAnimatedNode: Animated node [" + tag + "] does not exist"); } if (node instanceof AnimatedNodeWithUpdateableConfig) { stopAnimationsForNode(node); ((AnimatedNodeWithUpdateableConfig) node).onUpdateConfig(config); mUpdatedNodes.put(tag, node); } } @UiThread public void dropAnimatedNode(int tag) { mAnimatedNodes.remove(tag); mUpdatedNodes.remove(tag); } @UiThread 
public void startListeningToAnimatedNodeValue(int tag, AnimatedNodeValueListener listener) { AnimatedNode node = mAnimatedNodes.get(tag); if (node == null || !(node instanceof ValueAnimatedNode)) { throw new JSApplicationIllegalArgumentException( "startListeningToAnimatedNodeValue: Animated node [" + tag + "] does not exist, or is not a 'value' node"); } ((ValueAnimatedNode) node).setValueListener(listener); } @UiThread public void stopListeningToAnimatedNodeValue(int tag) { AnimatedNode node = mAnimatedNodes.get(tag); if (node == null || !(node instanceof ValueAnimatedNode)) { throw new JSApplicationIllegalArgumentException( "startListeningToAnimatedNodeValue: Animated node [" + tag + "] does not exist, or is not a 'value' node"); } ((ValueAnimatedNode) node).setValueListener(null); } @UiThread public void setAnimatedNodeValue(int tag, double value) { AnimatedNode node = mAnimatedNodes.get(tag); if (node == null || !(node instanceof ValueAnimatedNode)) { throw new JSApplicationIllegalArgumentException( "setAnimatedNodeValue: Animated node [" + tag + "] does not exist, or is not a 'value' node"); } stopAnimationsForNode(node); ((ValueAnimatedNode) node).mValue = value; mUpdatedNodes.put(tag, node); } @UiThread public void setAnimatedNodeOffset(int tag, double offset) { AnimatedNode node = mAnimatedNodes.get(tag); if (node == null || !(node instanceof ValueAnimatedNode)) { throw new JSApplicationIllegalArgumentException( "setAnimatedNodeOffset: Animated node [" + tag + "] does not exist, or is not a 'value' node"); } ((ValueAnimatedNode) node).mOffset = offset; mUpdatedNodes.put(tag, node); } @UiThread public void flattenAnimatedNodeOffset(int tag) { AnimatedNode node = mAnimatedNodes.get(tag); if (node == null || !(node instanceof ValueAnimatedNode)) { throw new JSApplicationIllegalArgumentException( "flattenAnimatedNodeOffset: Animated node [" + tag + "] does not exist, or is not a 'value' node"); } ((ValueAnimatedNode) node).flattenOffset(); } @UiThread public void extractAnimatedNodeOffset(int tag) { AnimatedNode node = mAnimatedNodes.get(tag); if (node == null || !(node instanceof ValueAnimatedNode)) { throw new JSApplicationIllegalArgumentException( "extractAnimatedNodeOffset: Animated node [" + tag + "] does not exist, or is not a 'value' node"); } ((ValueAnimatedNode) node).extractOffset(); } @UiThread public void startAnimatingNode( int animationId, int animatedNodeTag, ReadableMap animationConfig, Callback endCallback) { AnimatedNode node = mAnimatedNodes.get(animatedNodeTag); if (node == null) { throw new JSApplicationIllegalArgumentException( "startAnimatingNode: Animated node [" + animatedNodeTag + "] does not exist"); } if (!(node instanceof ValueAnimatedNode)) { throw new JSApplicationIllegalArgumentException( "startAnimatingNode: Animated node [" + animatedNodeTag + "] should be of type " + ValueAnimatedNode.class.getName()); } final AnimationDriver existingDriver = mActiveAnimations.get(animationId); if (existingDriver != null) { // animation with the given ID is already running, we need to update its configuration instead // of spawning a new one existingDriver.resetConfig(animationConfig); return; } String type = animationConfig.getString("type"); final AnimationDriver animation; if ("frames".equals(type)) { animation = new FrameBasedAnimationDriver(animationConfig); } else if ("spring".equals(type)) { animation = new SpringAnimation(animationConfig); } else if ("decay".equals(type)) { animation = new DecayAnimation(animationConfig); } else { throw new 
JSApplicationIllegalArgumentException( "startAnimatingNode: Unsupported animation type [" + animatedNodeTag + "]: " + type); } animation.mId = animationId; animation.mEndCallback = endCallback; animation.mAnimatedValue = (ValueAnimatedNode) node; mActiveAnimations.put(animationId, animation); } @UiThread private void stopAnimationsForNode(AnimatedNode animatedNode) { // in most of the cases there should never be more than a few active animations running at the // same time. Therefore it does not make much sense to create an animationId -> animation // object map that would require additional memory just to support the use-case of stopping // an animation for (int i = 0; i < mActiveAnimations.size(); i++) { AnimationDriver animation = mActiveAnimations.valueAt(i); if (animatedNode.equals(animation.mAnimatedValue)) { if (animation.mEndCallback != null) { // Invoke animation end callback with {finished: false} WritableMap endCallbackResponse = Arguments.createMap(); endCallbackResponse.putBoolean("finished", false); animation.mEndCallback.invoke(endCallbackResponse); } else if (mReactApplicationContext != null) { // If no callback is passed in, this /may/ be an animation set up by the single-op // instruction from JS, meaning that no jsi::functions are passed into native and // we communicate via RCTDeviceEventEmitter instead of callbacks. WritableMap params = Arguments.createMap(); params.putInt("animationId", animation.mId); params.putBoolean("finished", false); mReactApplicationContext .getJSModule(DeviceEventManagerModule.RCTDeviceEventEmitter.class) .emit("onNativeAnimatedModuleAnimationFinished", params); } mActiveAnimations.removeAt(i); i--; } } } @UiThread public void stopAnimation(int animationId) { // in most of the cases there should never be more than a few active animations running at the // same time. Therefore it does not make much sense to create an animationId -> animation // object map that would require additional memory just to support the use-case of stopping // an animation for (int i = 0; i < mActiveAnimations.size(); i++) { AnimationDriver animation = mActiveAnimations.valueAt(i); if (animation.mId == animationId) { if (animation.mEndCallback != null) { // Invoke animation end callback with {finished: false} WritableMap endCallbackResponse = Arguments.createMap(); endCallbackResponse.putBoolean("finished", false); animation.mEndCallback.invoke(endCallbackResponse); } else if (mReactApplicationContext != null) { // If no callback is passed in, this /may/ be an animation set up by the single-op // instruction from JS, meaning that no jsi::functions are passed into native and // we communicate via RCTDeviceEventEmitter instead of callbacks. WritableMap params = Arguments.createMap(); params.putInt("animationId", animation.mId); params.putBoolean("finished", false); mReactApplicationContext .getJSModule(DeviceEventManagerModule.RCTDeviceEventEmitter.class) .emit("onNativeAnimatedModuleAnimationFinished", params); } mActiveAnimations.removeAt(i); return; } } // Do not throw an error in the case animation could not be found. We only keep "active" // animations in the registry and there is a chance that Animated.js will enqueue a // stopAnimation call after the animation has ended or the call will reach native thread only // when the animation is already over. 
} @UiThread public void connectAnimatedNodes(int parentNodeTag, int childNodeTag) { AnimatedNode parentNode = mAnimatedNodes.get(parentNodeTag); if (parentNode == null) { throw new JSApplicationIllegalArgumentException( "connectAnimatedNodes: Animated node with tag (parent) [" + parentNodeTag + "] does not exist"); } AnimatedNode childNode = mAnimatedNodes.get(childNodeTag); if (childNode == null) { throw new JSApplicationIllegalArgumentException( "connectAnimatedNodes: Animated node with tag (child) [" + childNodeTag + "] does not exist"); } parentNode.addChild(childNode); mUpdatedNodes.put(childNodeTag, childNode); } public void disconnectAnimatedNodes(int parentNodeTag, int childNodeTag) { AnimatedNode parentNode = mAnimatedNodes.get(parentNodeTag); if (parentNode == null) { throw new JSApplicationIllegalArgumentException( "disconnectAnimatedNodes: Animated node with tag (parent) [" + parentNodeTag + "] does not exist"); } AnimatedNode childNode = mAnimatedNodes.get(childNodeTag); if (childNode == null) { throw new JSApplicationIllegalArgumentException( "disconnectAnimatedNodes: Animated node with tag (child) [" + childNodeTag + "] does not exist"); } parentNode.removeChild(childNode); mUpdatedNodes.put(childNodeTag, childNode); } @UiThread public void connectAnimatedNodeToView(int animatedNodeTag, int viewTag) { AnimatedNode node = mAnimatedNodes.get(animatedNodeTag); if (node == null) { throw new JSApplicationIllegalArgumentException( "connectAnimatedNodeToView: Animated node with tag [" + animatedNodeTag + "] does not exist"); } if (!(node instanceof PropsAnimatedNode)) { throw new JSApplicationIllegalArgumentException( "connectAnimatedNodeToView: Animated node connected to view [" + viewTag + "] should be of type " + PropsAnimatedNode.class.getName()); } if (mReactApplicationContext == null) { throw new IllegalStateException( "connectAnimatedNodeToView: Animated node could not be connected, no ReactApplicationContext: " + viewTag); } @Nullable UIManager uiManager = UIManagerHelper.getUIManagerForReactTag(mReactApplicationContext, viewTag); if (uiManager == null) { ReactSoftExceptionLogger.logSoftException( TAG, new ReactNoCrashSoftException( "connectAnimatedNodeToView: Animated node could not be connected to UIManager - uiManager disappeared for tag: " + viewTag)); return; } PropsAnimatedNode propsAnimatedNode = (PropsAnimatedNode) node; propsAnimatedNode.connectToView(viewTag, uiManager); mUpdatedNodes.put(animatedNodeTag, node); } @UiThread public void disconnectAnimatedNodeFromView(int animatedNodeTag, int viewTag) { AnimatedNode node = mAnimatedNodes.get(animatedNodeTag); if (node == null) { throw new JSApplicationIllegalArgumentException( "disconnectAnimatedNodeFromView: Animated node with tag [" + animatedNodeTag + "] does not exist"); } if (!(node instanceof PropsAnimatedNode)) { throw new JSApplicationIllegalArgumentException( "disconnectAnimatedNodeFromView: Animated node connected to view [" + viewTag + "] should be of type " + PropsAnimatedNode.class.getName()); } PropsAnimatedNode propsAnimatedNode = (PropsAnimatedNode) node; propsAnimatedNode.disconnectFromView(viewTag); } @UiThread public void getValue(int tag, Callback callback) { AnimatedNode node = mAnimatedNodes.get(tag); if (node == null || !(node instanceof ValueAnimatedNode)) { throw new JSApplicationIllegalArgumentException( "getValue: Animated node with tag [" + tag + "] does not exist or is not a 'value' node"); } double value = ((ValueAnimatedNode) node).getValue(); if (callback != null) { 
callback.invoke(value); return; } // If there's no callback, that means that JS is using the single-operation mode, and not // passing any callbacks into Java. // See NativeAnimatedHelper.js for details. // Instead, we use RCTDeviceEventEmitter to pass data back to JS and emulate callbacks. if (mReactApplicationContext == null) { return; } WritableMap params = Arguments.createMap(); params.putInt("tag", tag); params.putDouble("value", value); mReactApplicationContext .getJSModule(DeviceEventManagerModule.RCTDeviceEventEmitter.class) .emit("onNativeAnimatedModuleGetValue", params); } @UiThread public void restoreDefaultValues(int animatedNodeTag) { AnimatedNode node = mAnimatedNodes.get(animatedNodeTag); // Restoring default values needs to happen before UIManager operations so it is // possible the node hasn't been created yet if it is being connected and // disconnected in the same batch. In that case we don't need to restore // default values since it will never actually update the view. if (node == null) { return; } if (!(node instanceof PropsAnimatedNode)) { throw new JSApplicationIllegalArgumentException( "Animated node connected to view [?] should be of type " + PropsAnimatedNode.class.getName()); } PropsAnimatedNode propsAnimatedNode = (PropsAnimatedNode) node; propsAnimatedNode.restoreDefaultValues(); } @UiThread public void addAnimatedEventToView(int viewTag, String eventName, ReadableMap eventMapping) { int nodeTag = eventMapping.getInt("animatedValueTag"); AnimatedNode node = mAnimatedNodes.get(nodeTag); if (node == null) { throw new JSApplicationIllegalArgumentException( "addAnimatedEventToView: Animated node with tag [" + nodeTag + "] does not exist"); } if (!(node instanceof ValueAnimatedNode)) { throw new JSApplicationIllegalArgumentException( "addAnimatedEventToView: Animated node on view [" + viewTag + "] connected to event (" + eventName + ") should be of type " + ValueAnimatedNode.class.getName()); } ReadableArray path = eventMapping.getArray("nativeEventPath"); List<String> pathList = new ArrayList<>(path.size()); for (int i = 0; i < path.size(); i++) { pathList.add(path.getString(i)); } EventAnimationDriver event = new EventAnimationDriver(pathList, (ValueAnimatedNode) node); String key = viewTag + eventName; if (mEventDrivers.containsKey(key)) { mEventDrivers.get(key).add(event); } else { List<EventAnimationDriver> drivers = new ArrayList<>(1); drivers.add(event); mEventDrivers.put(key, drivers); } } @UiThread public void removeAnimatedEventFromView(int viewTag, String eventName, int animatedValueTag) { String key = viewTag + eventName; if (mEventDrivers.containsKey(key)) { List<EventAnimationDriver> driversForKey = mEventDrivers.get(key); if (driversForKey.size() == 1) { mEventDrivers.remove(viewTag + eventName); } else { ListIterator<EventAnimationDriver> it = driversForKey.listIterator(); while (it.hasNext()) { if (it.next().mValueNode.mTag == animatedValueTag) { it.remove(); break; } } } } } @Override public void onEventDispatch(final Event event) { // Events can be dispatched from any thread so we have to make sure handleEvent is run from the // UI thread. if (UiThreadUtil.isOnUiThread()) { handleEvent(event); } else { UiThreadUtil.runOnUiThread( new Runnable() { @Override public void run() { handleEvent(event); } }); } } @UiThread private void handleEvent(Event event) { if (!mEventDrivers.isEmpty()) { // If the event has a different name in native convert it to it's JS name. 
// TODO T64216139 Remove dependency of UIManagerModule when the Constants are not in Native // anymore if (mReactApplicationContext == null) { return; } UIManager uiManager = UIManagerHelper.getUIManager(mReactApplicationContext, event.getUIManagerType()); if (uiManager == null) { return; } String eventName = uiManager.resolveCustomDirectEventName(event.getEventName()); if (eventName == null) { eventName = ""; } List<EventAnimationDriver> driversForKey = mEventDrivers.get(event.getViewTag() + eventName); if (driversForKey != null) { for (EventAnimationDriver driver : driversForKey) { stopAnimationsForNode(driver.mValueNode); event.dispatch(driver); mRunUpdateNodeList.add(driver.mValueNode); } updateNodes(mRunUpdateNodeList); mRunUpdateNodeList.clear(); } } } /** * Animation loop performs two BFSes over the graph of animated nodes. We use incremented {@code * mAnimatedGraphBFSColor} to mark nodes as visited in each of the BFSes which saves additional * loops for clearing "visited" states. * * <p>First BFS starts with nodes that are in {@code mUpdatedNodes} (that is, their value have * been modified from JS in the last batch of JS operations) or directly attached to an active * animation (hence linked to objects from {@code mActiveAnimations}). In that step we calculate * an attribute {@code mActiveIncomingNodes}. The second BFS runs in topological order over the * sub-graph of *active* nodes. This is done by adding node to the BFS queue only if all its * "predecessors" have already been visited. */ @UiThread public void runUpdates(long frameTimeNanos) { UiThreadUtil.assertOnUiThread(); boolean hasFinishedAnimations = false; for (int i = 0; i < mUpdatedNodes.size(); i++) { AnimatedNode node = mUpdatedNodes.valueAt(i); mRunUpdateNodeList.add(node); } // Clean mUpdatedNodes queue mUpdatedNodes.clear(); for (int i = 0; i < mActiveAnimations.size(); i++) { AnimationDriver animation = mActiveAnimations.valueAt(i); animation.runAnimationStep(frameTimeNanos); AnimatedNode valueNode = animation.mAnimatedValue; mRunUpdateNodeList.add(valueNode); if (animation.mHasFinished) { hasFinishedAnimations = true; } } updateNodes(mRunUpdateNodeList); mRunUpdateNodeList.clear(); // Cleanup finished animations. Iterate over the array of animations and override ones that has // finished, then resize `mActiveAnimations`. if (hasFinishedAnimations) { for (int i = mActiveAnimations.size() - 1; i >= 0; i--) { AnimationDriver animation = mActiveAnimations.valueAt(i); if (animation.mHasFinished) { if (animation.mEndCallback != null) { WritableMap endCallbackResponse = Arguments.createMap(); endCallbackResponse.putBoolean("finished", true); animation.mEndCallback.invoke(endCallbackResponse); } else if (mReactApplicationContext != null) { // If no callback is passed in, this /may/ be an animation set up by the single-op // instruction from JS, meaning that no jsi::functions are passed into native and // we communicate via RCTDeviceEventEmitter instead of callbacks. WritableMap params = Arguments.createMap(); params.putInt("animationId", animation.mId); params.putBoolean("finished", true); DeviceEventManagerModule.RCTDeviceEventEmitter eventEmitter = mReactApplicationContext.getJSModule( DeviceEventManagerModule.RCTDeviceEventEmitter.class); if (eventEmitter != null) { eventEmitter.emit("onNativeAnimatedModuleAnimationFinished", params); } } mActiveAnimations.removeAt(i); } } } } @UiThread private void updateNodes(List<AnimatedNode> nodes) { int activeNodesCount = 0; int updatedNodesCount = 0; // STEP 1. 
// BFS over graph of nodes. Update `mIncomingNodes` attribute for each node during that BFS. // Store number of visited nodes in `activeNodesCount`. We "execute" active animations as a part // of this step. mAnimatedGraphBFSColor++; /* use new color */ if (mAnimatedGraphBFSColor == AnimatedNode.INITIAL_BFS_COLOR) { // value "0" is used as an initial color for a new node, using it in BFS may cause some nodes // to be skipped. mAnimatedGraphBFSColor++; } Queue<AnimatedNode> nodesQueue = new ArrayDeque<>(); for (AnimatedNode node : nodes) { if (node.mBFSColor != mAnimatedGraphBFSColor) { node.mBFSColor = mAnimatedGraphBFSColor; activeNodesCount++; nodesQueue.add(node); } } while (!nodesQueue.isEmpty()) { AnimatedNode nextNode = nodesQueue.poll(); if (nextNode.mChildren != null) { for (int i = 0; i < nextNode.mChildren.size(); i++) { AnimatedNode child = nextNode.mChildren.get(i); child.mActiveIncomingNodes++; if (child.mBFSColor != mAnimatedGraphBFSColor) { child.mBFSColor = mAnimatedGraphBFSColor; activeNodesCount++; nodesQueue.add(child); } } } } // STEP 2 // BFS over the graph of active nodes in topological order -> visit node only when all its // "predecessors" in the graph have already been visited. It is important to visit nodes in that // order as they may often use values of their predecessors in order to calculate "next state" // of their own. We start by determining the starting set of nodes by looking for nodes with // `mActiveIncomingNodes = 0` (those can only be the ones that we start BFS in the previous // step). We store number of visited nodes in this step in `updatedNodesCount` mAnimatedGraphBFSColor++; if (mAnimatedGraphBFSColor == AnimatedNode.INITIAL_BFS_COLOR) { // see reasoning for this check a few lines above mAnimatedGraphBFSColor++; } // find nodes with zero "incoming nodes", those can be either nodes from `mUpdatedNodes` or // ones connected to active animations for (AnimatedNode node : nodes) { if (node.mActiveIncomingNodes == 0 && node.mBFSColor != mAnimatedGraphBFSColor) { node.mBFSColor = mAnimatedGraphBFSColor; updatedNodesCount++; nodesQueue.add(node); } } // Run main "update" loop int cyclesDetected = 0; while (!nodesQueue.isEmpty()) { AnimatedNode nextNode = nodesQueue.poll(); try { nextNode.update(); if (nextNode instanceof PropsAnimatedNode) { // Send property updates to native view manager ((PropsAnimatedNode) nextNode).updateView(); } } catch (JSApplicationCausedNativeException e) { // An exception is thrown if the view hasn't been created yet. This can happen because // views are created in batches. If this particular view didn't make it into a batch yet, // the view won't exist and an exception will be thrown when attempting to start an // animation on it. // // Eat the exception rather than crashing. The impact is that we may drop one or more // frames of the animation. 
FLog.e(TAG, "Native animation workaround, frame lost as result of race condition", e); } if (nextNode instanceof ValueAnimatedNode) { // Potentially send events to JS when the node's value is updated ((ValueAnimatedNode) nextNode).onValueUpdate(); } if (nextNode.mChildren != null) { for (int i = 0; i < nextNode.mChildren.size(); i++) { AnimatedNode child = nextNode.mChildren.get(i); child.mActiveIncomingNodes--; if (child.mBFSColor != mAnimatedGraphBFSColor && child.mActiveIncomingNodes == 0) { child.mBFSColor = mAnimatedGraphBFSColor; updatedNodesCount++; nodesQueue.add(child); } else if (child.mBFSColor == mAnimatedGraphBFSColor) { cyclesDetected++; } } } } // Verify that we've visited *all* active nodes. Throw otherwise as this could mean there is a // cycle in animated node graph, or that the graph is only partially set up. We also take // advantage of the fact that all active nodes are visited in the step above so that all the // nodes properties `mActiveIncomingNodes` are set to zero. // In Fabric there can be race conditions between the JS thread setting up or tearing down // animated nodes, and Fabric executing them on the UI thread, leading to temporary inconsistent // states. if (activeNodesCount != updatedNodesCount) { if (mWarnedAboutGraphTraversal) { return; } mWarnedAboutGraphTraversal = true; // Before crashing or logging soft exception, log details about current graph setup FLog.e(TAG, "Detected animation cycle or disconnected graph. "); for (AnimatedNode node : nodes) { FLog.e(TAG, node.prettyPrintWithChildren()); } // If we're running only in non-Fabric, we still throw an exception. // In Fabric, it seems that animations enter an inconsistent state fairly often. // We detect if the inconsistency is due to a cycle (a fatal error for which we must crash) // or disconnected regions, indicating a partially-set-up animation graph, which is not // fatal and can stay a warning. String reason = cyclesDetected > 0 ? "cycles (" + cyclesDetected + ")" : "disconnected regions"; IllegalStateException ex = new IllegalStateException( "Looks like animated nodes graph has " + reason + ", there are " + activeNodesCount + " but toposort visited only " + updatedNodesCount); if (mEventListenerInitializedForFabric && cyclesDetected == 0) { // TODO T71377544: investigate these SoftExceptions and see if we can remove entirely // or fix the root cause ReactSoftExceptionLogger.logSoftException(TAG, new ReactNoCrashSoftException(ex)); } else if (mEventListenerInitializedForFabric) { // TODO T71377544: investigate these SoftExceptions and see if we can remove entirely // or fix the root cause ReactSoftExceptionLogger.logSoftException(TAG, new ReactNoCrashSoftException(ex)); } else { throw ex; } } else { mWarnedAboutGraphTraversal = false; } } }
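The comments in runUpdates() and updateNodes() above describe a two-pass scheme: a first BFS that counts active incoming edges per reachable node, and a second pass that visits nodes in topological order by enqueueing a child only once its last active predecessor has been processed, using an incrementing color to mark visits. Below is a minimal standalone sketch of that scheme with a made-up three-node graph (value, interpolation, props) purely for illustration; it mirrors only the traversal logic, not the real AnimatedNode classes.

import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.List;
import java.util.Queue;

// Toy model of the two-pass update in updateNodes(); node names and the graph are illustrative only.
public final class TopoUpdateSketch {
    static final class Node {
        final String name;
        final List<Node> children = new ArrayList<>();
        int activeIncoming; // mirrors mActiveIncomingNodes
        int color;          // mirrors mBFSColor: "visited" marker without clearing between passes
        Node(String name) { this.name = name; }
        void update() { System.out.println("update " + name); }
    }

    public static void main(String[] args) {
        Node value = new Node("value");
        Node interpolation = new Node("interpolation");
        Node props = new Node("props");
        value.children.add(interpolation);
        interpolation.children.add(props);
        List<Node> updatedRoots = List.of(value);

        int color = 1;

        // Pass 1: BFS from the updated roots; count active incoming edges of every reachable node.
        Queue<Node> queue = new ArrayDeque<>();
        for (Node root : updatedRoots) {
            root.color = color;
            queue.add(root);
        }
        while (!queue.isEmpty()) {
            Node next = queue.poll();
            for (Node child : next.children) {
                child.activeIncoming++;
                if (child.color != color) {
                    child.color = color;
                    queue.add(child);
                }
            }
        }

        // Pass 2: bump the color, start from nodes with no active incoming edges, and enqueue a
        // child only after its last predecessor was processed (topological order).
        color++;
        for (Node root : updatedRoots) {
            if (root.activeIncoming == 0 && root.color != color) {
                root.color = color;
                queue.add(root);
            }
        }
        while (!queue.isEmpty()) {
            Node next = queue.poll();
            next.update(); // prints value, interpolation, props: predecessors always come first
            for (Node child : next.children) {
                child.activeIncoming--;
                if (child.activeIncoming == 0 && child.color != color) {
                    child.color = color;
                    queue.add(child);
                }
            }
        }
    }
}

In the sketch, as in the real code, any node still carrying a nonzero activeIncoming count after the second pass would indicate a cycle or a disconnected region of the graph.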
Add check for native animated node existing before starting animation

Summary: We need to check that the animated node exists prior to executing the animation. The native animated node lifecycle is not synced with Fabric and nodes are frequently destroyed and re-created on rerenders. Therefore, there is a possibility that the animated node does not exist when the native event is dispatched, in particular with native call batching.

Changelog: [Internal] - Make NativeAnimatedNodesManager.getNodeById public

Reviewed By: JoshuaGross

Differential Revision: D37323138

fbshipit-source-id: ed0567871b4189c454b6b3145b853ecdfe844840
ReactAndroid/src/main/java/com/facebook/react/animated/NativeAnimatedNodesManager.java
Add check for native animated node existing before starting animation
<ide><path>ReactAndroid/src/main/java/com/facebook/react/animated/NativeAnimatedNodesManager.java
<ide> }
<ide> }
<ide>
<del> /*package*/ @Nullable
<del> AnimatedNode getNodeById(int id) {
<add> @Nullable
<add> public AnimatedNode getNodeById(int id) {
<ide> return mAnimatedNodes.get(id);
<ide>
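The diff only changes the visibility of getNodeById, so the existence check itself lives in the caller. Below is a minimal sketch of how a caller might guard an animation start with that lookup; the class name, method name, and placement in the com.facebook.react.animated package are illustrative assumptions, not the actual React Native call site.

package com.facebook.react.animated; // assumed placement: AnimatedNode is package-private, so a caller sketch has to live in this package

// Hypothetical helper, not the actual React Native call site.
final class AnimatedNodeStartGuard {
    private AnimatedNodeStartGuard() {}

    /** Returns true only if the animated node still exists and an animation may be started on it. */
    static boolean canStartAnimation(NativeAnimatedNodesManager nodesManager, int animatedNodeTag) {
        // With Fabric, nodes are destroyed and re-created across re-renders, so by the time a
        // batched native call runs on the UI thread the tag may refer to a node that is gone.
        AnimatedNode node = nodesManager.getNodeById(animatedNodeTag);
        return node != null;
    }
}

A caller that gets false back would skip (or log) the start rather than letting downstream code throw on a missing node.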
Java
apache-2.0
0a0f9a7cab9f46b265c094545121fbe057b5bc51
0
springning/presto,siddhartharay007/presto,CHINA-JD/presto,kuzemchik/presto,mbeitchman/presto,smartpcr/presto,zjshen/presto,treasure-data/presto,miniway/presto,mcanthony/presto,mvp/presto,ipros-team/presto,yu-yamada/presto,mono-plane/presto,hgschmie/presto,stewartpark/presto,erichwang/presto,fengshao0907/presto,deciament/presto,hulu/presto,xiangel/presto,nileema/presto,raghavsethi/presto,lingochamp/presto,treasure-data/presto,shixuan-fan/presto,pnowojski/presto,zhenyuy-fb/presto,fipar/presto,11xor6/presto,arhimondr/presto,wyukawa/presto,electrum/presto,Zoomdata/presto,ptkool/presto,lingochamp/presto,nakajijiji/presto,nsabharwal/presto,mandusm/presto,wagnermarkd/presto,zhenyuy-fb/presto,kingland/presto,wrmsr/presto,hgschmie/presto,wyukawa/presto,wyukawa/presto,totticarter/presto,RobinUS2/presto,cberner/presto,HackShare/Presto,takari/presto,cawallin/presto,zhenyuy-fb/presto,wyukawa/presto,Yaliang/presto,dabaitu/presto,ajoabraham/presto,toxeh/presto,avasilevskiy/presto,joy-yao/presto,pwz3n0/presto,mono-plane/presto,avasilevskiy/presto,haozhun/presto,gh351135612/presto,mugglmenzel/presto,hgschmie/presto,toyama0919/presto,pwz3n0/presto,prateek1306/presto,troels/nz-presto,kuzemchik/presto,ArturGajowy/presto,nezihyigitbasi/presto,erichwang/presto,elonazoulay/presto,ebyhr/presto,ajoabraham/presto,ebyhr/presto,albertocsm/presto,chrisunder/presto,albertocsm/presto,miniway/presto,treasure-data/presto,mcanthony/presto,facebook/presto,Svjard/presto,miquelruiz/presto,wrmsr/presto,tomz/presto,troels/nz-presto,svstanev/presto,dain/presto,pnowojski/presto,jiangyifangh/presto,nvoron23/presto,prateek1306/presto,suyucs/presto,zzhao0/presto,mpilman/presto,Zoomdata/presto,springning/presto,totticarter/presto,RobinUS2/presto,jf367/presto,jxiang/presto,EvilMcJerkface/presto,jekey/presto,RobinUS2/presto,takari/presto,ebd2/presto,cosinequanon/presto,nakajijiji/presto,ptkool/presto,toxeh/presto,jietang3/test,kingland/presto,haitaoyao/presto,martint/presto,haitaoyao/presto,mvp/presto,aleph-zero/presto,Praveen2112/presto,jietang3/test,TeradataCenterForHadoop/bootcamp,tellproject/presto,ocono-tech/presto,saidalaoui/presto,RobinUS2/presto,smartnews/presto,aglne/presto,ajoabraham/presto,martint/presto,elonazoulay/presto,fiedukow/presto,harunurhan/presto,mvp/presto,propene/presto,tomz/presto,Svjard/presto,Svjard/presto,yu-yamada/presto,Teradata/presto,ajoabraham/presto,cosinequanon/presto,jiangyifangh/presto,chrisunder/presto,kined/presto,kietly/presto,vishalsan/presto,zhenxiao/presto,losipiuk/presto,miquelruiz/presto,cawallin/presto,Zoomdata/presto,DanielTing/presto,Jimexist/presto,aramesh117/presto,ipros-team/presto,idemura/presto,geraint0923/presto,mattyb149/presto,jekey/presto,raghavsethi/presto,hgschmie/presto,denizdemir/presto,joshk/presto,kined/presto,siddhartharay007/presto,sopel39/presto,yuananf/presto,kietly/presto,wangcan2014/presto,smartpcr/presto,EvilMcJerkface/presto,kaschaeffer/presto,Praveen2112/presto,zzhao0/presto,toyama0919/presto,stagraqubole/presto,nileema/presto,hulu/presto,miniway/presto,yuananf/presto,kietly/presto,sunchao/presto,saidalaoui/presto,kietly/presto,mugglmenzel/presto,mattyb149/presto,vishalsan/presto,yuananf/presto,DanielTing/presto,Svjard/presto,tomz/presto,kaschaeffer/presto,vermaravikant/presto,cberner/presto,gcnonato/presto,stewartpark/presto,damiencarol/presto,xiangel/presto,nsabharwal/presto,shubham166/presto,zofuthan/presto,stewartpark/presto,Praveen2112/presto,ebd2/presto,hulu/presto,kuzemchik/presto,jf367/presto,pnowojski/presto,dongjoon-hyun/presto,tellproject/presto,harunurhan
/presto,aleph-zero/presto,soz-fb/presto,zzhao0/presto,prestodb/presto,bloomberg/presto,wrmsr/presto,toxeh/presto,Jimexist/presto,arhimondr/presto,lingochamp/presto,mpilman/presto,wagnermarkd/presto,cosinequanon/presto,joshk/presto,Praveen2112/presto,ebd2/presto,stewartpark/presto,wagnermarkd/presto,mandusm/presto,mandusm/presto,facebook/presto,sopel39/presto,EvilMcJerkface/presto,joshk/presto,mpilman/presto,ocono-tech/presto,tellproject/presto,takari/presto,mugglmenzel/presto,mono-plane/presto,zofuthan/presto,mbeitchman/presto,Yaliang/presto,toyama0919/presto,sopel39/presto,losipiuk/presto,jacobgao/presto,aglne/presto,ipros-team/presto,lingochamp/presto,smartnews/presto,DanielTing/presto,shixuan-fan/presto,geraint0923/presto,dongjoon-hyun/presto,propene/presto,kingland/presto,TeradataCenterForHadoop/bootcamp,zjshen/presto,mcanthony/presto,jxiang/presto,treasure-data/presto,youngwookim/presto,Teradata/presto,takari/presto,deciament/presto,sdgdsffdsfff/presto,nakajijiji/presto,denizdemir/presto,deciament/presto,Nasdaq/presto,cawallin/presto,troels/nz-presto,prestodb/presto,mattyb149/presto,aramesh117/presto,Svjard/presto,CHINA-JD/presto,prateek1306/presto,springning/presto,youngwookim/presto,mvp/presto,ArturGajowy/presto,harunurhan/presto,ArturGajowy/presto,jacobgao/presto,sumitkgec/presto,gh351135612/presto,svstanev/presto,y-lan/presto,fengshao0907/presto,wyukawa/presto,damiencarol/presto,geraint0923/presto,mandusm/presto,mode/presto,rockerbox/presto,aleph-zero/presto,mugglmenzel/presto,yuananf/presto,joy-yao/presto,kuzemchik/presto,ebd2/presto,nileema/presto,youngwookim/presto,mode/presto,ArturGajowy/presto,joy-yao/presto,kined/presto,propene/presto,sumitkgec/presto,smartpcr/presto,arhimondr/presto,facebook/presto,nezihyigitbasi/presto,vermaravikant/presto,dain/presto,hgschmie/presto,mpilman/presto,jekey/presto,Myrthan/presto,sopel39/presto,jiekechoo/presto,fengshao0907/presto,ocono-tech/presto,aglne/presto,fipar/presto,ebd2/presto,jf367/presto,Zoomdata/presto,treasure-data/presto,Jimexist/presto,kaschaeffer/presto,aramesh117/presto,geraint0923/presto,haitaoyao/presto,avasilevskiy/presto,toyama0919/presto,y-lan/presto,shixuan-fan/presto,prestodb/presto,fipar/presto,fengshao0907/presto,hulu/presto,springning/presto,kingland/presto,Jimexist/presto,wangcan2014/presto,totticarter/presto,y-lan/presto,dongjoon-hyun/presto,Teradata/presto,mpilman/presto,joy-yao/presto,kaschaeffer/presto,vishalsan/presto,pnowojski/presto,cawallin/presto,RobinUS2/presto,TeradataCenterForHadoop/bootcamp,jacobgao/presto,saidalaoui/presto,elonazoulay/presto,gh351135612/presto,haitaoyao/presto,xiangel/presto,Yaliang/presto,losipiuk/presto,wangcan2014/presto,albertocsm/presto,zhenxiao/presto,nakajijiji/presto,tellproject/presto,cawallin/presto,damiencarol/presto,totticarter/presto,DanielTing/presto,nsabharwal/presto,zhenyuy-fb/presto,yu-yamada/presto,rockerbox/presto,dain/presto,suyucs/presto,nezihyigitbasi/presto,sunchao/presto,soz-fb/presto,gcnonato/presto,fiedukow/presto,Jimexist/presto,sunchao/presto,raghavsethi/presto,smartpcr/presto,dabaitu/presto,jiekechoo/presto,sumitkgec/presto,zjshen/presto,sumitkgec/presto,dongjoon-hyun/presto,siddhartharay007/presto,zzhao0/presto,nakajijiji/presto,smartpcr/presto,prestodb/presto,idemura/presto,TeradataCenterForHadoop/bootcamp,fipar/presto,twitter-forks/presto,facebook/presto,aramesh117/presto,Teradata/presto,idemura/presto,kined/presto,XiaominZhang/presto,chrisunder/presto,nsabharwal/presto,sunchao/presto,zhenyuy-fb/presto,wagnermarkd/presto,cosinequanon/presto,saidalaoui/pre
sto,zofuthan/presto,martint/presto,ebyhr/presto,XiaominZhang/presto,tomz/presto,smartnews/presto,sdgdsffdsfff/presto,rockerbox/presto,losipiuk/presto,twitter-forks/presto,zhenxiao/presto,zjshen/presto,totticarter/presto,zhenxiao/presto,nileema/presto,Yaliang/presto,nvoron23/presto,jiekechoo/presto,mode/presto,wangcan2014/presto,XiaominZhang/presto,ArturGajowy/presto,springning/presto,siddhartharay007/presto,CHINA-JD/presto,sumanth232/presto,ebyhr/presto,gh351135612/presto,kined/presto,svstanev/presto,mono-plane/presto,jiangyifangh/presto,frsyuki/presto,zjshen/presto,wrmsr/presto,rockerbox/presto,shixuan-fan/presto,aglne/presto,sunchao/presto,twitter-forks/presto,vermaravikant/presto,ocono-tech/presto,lingochamp/presto,sumanth232/presto,jiangyifangh/presto,mbeitchman/presto,suyucs/presto,jacobgao/presto,damiencarol/presto,miquelruiz/presto,denizdemir/presto,ipros-team/presto,dabaitu/presto,vermaravikant/presto,prestodb/presto,haozhun/presto,zzhao0/presto,EvilMcJerkface/presto,jxiang/presto,idemura/presto,haozhun/presto,troels/nz-presto,jf367/presto,CHINA-JD/presto,tellproject/presto,martint/presto,XiaominZhang/presto,mbeitchman/presto,Myrthan/presto,losipiuk/presto,propene/presto,mattyb149/presto,soz-fb/presto,jiangyifangh/presto,vermaravikant/presto,11xor6/presto,elonazoulay/presto,cberner/presto,soz-fb/presto,11xor6/presto,haozhun/presto,kingland/presto,hulu/presto,deciament/presto,jiekechoo/presto,erichwang/presto,elonazoulay/presto,twitter-forks/presto,avasilevskiy/presto,dongjoon-hyun/presto,bloomberg/presto,Zoomdata/presto,dabaitu/presto,yuananf/presto,cosinequanon/presto,smartnews/presto,svstanev/presto,xiangel/presto,prateek1306/presto,denizdemir/presto,ipros-team/presto,sopel39/presto,gcnonato/presto,ajoabraham/presto,electrum/presto,haitaoyao/presto,kietly/presto,pwz3n0/presto,cberner/presto,nezihyigitbasi/presto,wrmsr/presto,bloomberg/presto,toxeh/presto,tomz/presto,ocono-tech/presto,miniway/presto,shubham166/presto,stagraqubole/presto,sumanth232/presto,troels/nz-presto,stewartpark/presto,aramesh117/presto,sdgdsffdsfff/presto,soz-fb/presto,youngwookim/presto,sumitkgec/presto,idemura/presto,wangcan2014/presto,prestodb/presto,11xor6/presto,joy-yao/presto,suyucs/presto,jekey/presto,joshk/presto,propene/presto,erichwang/presto,aglne/presto,Nasdaq/presto,pwz3n0/presto,raghavsethi/presto,mattyb149/presto,smartnews/presto,HackShare/Presto,kuzemchik/presto,gh351135612/presto,shubham166/presto,xiangel/presto,Nasdaq/presto,takari/presto,miniway/presto,yu-yamada/presto,toxeh/presto,avasilevskiy/presto,y-lan/presto,arhimondr/presto,nsabharwal/presto,fiedukow/presto,Yaliang/presto,rockerbox/presto,shubham166/presto,DanielTing/presto,jxiang/presto,deciament/presto,siddhartharay007/presto,jiekechoo/presto,treasure-data/presto,fipar/presto,cberner/presto,haozhun/presto,y-lan/presto,harunurhan/presto,electrum/presto,fiedukow/presto,svstanev/presto,dabaitu/presto,geraint0923/presto,dain/presto,electrum/presto,toyama0919/presto,zofuthan/presto,youngwookim/presto,bloomberg/presto,albertocsm/presto,mpilman/presto,damiencarol/presto,nezihyigitbasi/presto,sumanth232/presto,prateek1306/presto,mandusm/presto,Myrthan/presto,Myrthan/presto,EvilMcJerkface/presto,nileema/presto,harunurhan/presto,pwz3n0/presto,nvoron23/presto,facebook/presto,mvp/presto,raghavsethi/presto,miquelruiz/presto,Praveen2112/presto,nvoron23/presto,wagnermarkd/presto,ebyhr/presto,ptkool/presto,jxiang/presto,tellproject/presto,gcnonato/presto,aleph-zero/presto,suyucs/presto,shixuan-fan/presto,HackShare/Presto,zofuthan/presto,mbeitchman/
presto,dain/presto,frsyuki/presto,chrisunder/presto,jf367/presto,aleph-zero/presto,kaschaeffer/presto,saidalaoui/presto,Nasdaq/presto,Teradata/presto,wrmsr/presto,chrisunder/presto,miquelruiz/presto,martint/presto,mode/presto,fiedukow/presto,arhimondr/presto,mcanthony/presto,electrum/presto,fengshao0907/presto,ptkool/presto,twitter-forks/presto,ptkool/presto,mode/presto,mcanthony/presto,erichwang/presto,yu-yamada/presto,bloomberg/presto,TeradataCenterForHadoop/bootcamp,mugglmenzel/presto,11xor6/presto,albertocsm/presto,sumanth232/presto,CHINA-JD/presto,Nasdaq/presto,XiaominZhang/presto,jekey/presto,Myrthan/presto
package com.facebook.presto; import com.facebook.presto.connector.dual.DualMetadata; import com.facebook.presto.connector.informationSchema.InformationSchemaMetadata; import com.facebook.presto.importer.MockPeriodicImportManager; import com.facebook.presto.metadata.MetadataManager; import com.facebook.presto.spi.ColumnMetadata; import com.facebook.presto.spi.RecordCursor; import com.facebook.presto.spi.RecordSet; import com.facebook.presto.spi.TableMetadata; import com.facebook.presto.sql.analyzer.QueryExplainer; import com.facebook.presto.sql.analyzer.Session; import com.facebook.presto.sql.parser.SqlParser; import com.facebook.presto.sql.planner.PlanOptimizersFactory; import com.facebook.presto.sql.planner.optimizations.PlanOptimizer; import com.facebook.presto.sql.tree.ExplainType; import com.facebook.presto.sql.tree.Query; import com.facebook.presto.storage.MockStorageManager; import com.facebook.presto.tpch.TpchMetadata; import com.facebook.presto.tuple.Tuple; import com.facebook.presto.tuple.TupleInfo; import com.facebook.presto.util.MaterializedResult; import com.facebook.presto.util.MaterializedTuple; import com.google.common.base.Function; import com.google.common.base.Joiner; import com.google.common.collect.ArrayListMultimap; import com.google.common.collect.ImmutableMultimap; import com.google.common.collect.ImmutableMultiset; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; import com.google.common.collect.Multimap; import com.google.common.collect.Multimaps; import com.google.common.collect.Ordering; import io.airlift.log.Logger; import io.airlift.log.Logging; import io.airlift.slice.Slices; import io.airlift.units.Duration; import org.intellij.lang.annotations.Language; import org.skife.jdbi.v2.DBI; import org.skife.jdbi.v2.Handle; import org.skife.jdbi.v2.PreparedBatch; import org.skife.jdbi.v2.PreparedBatchPart; import org.skife.jdbi.v2.StatementContext; import org.skife.jdbi.v2.tweak.ResultSetMapper; import org.testng.Assert; import org.testng.annotations.AfterClass; import org.testng.annotations.BeforeClass; import org.testng.annotations.Test; import java.sql.ResultSet; import java.sql.SQLException; import java.util.List; import static com.facebook.presto.sql.analyzer.Session.DEFAULT_CATALOG; import static com.facebook.presto.sql.analyzer.Session.DEFAULT_SCHEMA; import static com.facebook.presto.sql.tree.ExplainType.Type.DISTRIBUTED; import static com.facebook.presto.sql.tree.ExplainType.Type.LOGICAL; import static com.facebook.presto.tpch.TpchMetadata.TPCH_LINEITEM_METADATA; import static com.facebook.presto.tpch.TpchMetadata.TPCH_LINEITEM_NAME; import static com.facebook.presto.tpch.TpchMetadata.TPCH_ORDERS_METADATA; import static com.facebook.presto.tpch.TpchMetadata.TPCH_ORDERS_NAME; import static com.facebook.presto.tpch.TpchMetadata.TPCH_SCHEMA_NAME; import static com.facebook.presto.tuple.TupleInfo.Type.BOOLEAN; import static com.facebook.presto.tuple.TupleInfo.Type.DOUBLE; import static com.facebook.presto.tuple.TupleInfo.Type.FIXED_INT_64; import static com.facebook.presto.tuple.TupleInfo.Type.VARIABLE_BINARY; import static com.facebook.presto.util.InMemoryTpchBlocksProvider.readTpchRecords; import static com.facebook.presto.util.MaterializedResult.resultBuilder; import static com.google.common.base.Charsets.UTF_8; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.collect.Iterables.transform; import static java.lang.String.format; import static 
java.util.Collections.nCopies; import static org.testng.Assert.assertEquals; import static org.testng.Assert.assertNotNull; import static org.testng.Assert.assertTrue; import static org.testng.Assert.fail; public abstract class AbstractTestQueries { private Handle handle; @Test public void testMaxMinStringWithNulls() throws Exception { assertQuery("SELECT custkey, MAX(NULLIF(orderstatus, 'O')), MIN(NULLIF(orderstatus, 'O')) FROM orders GROUP BY custkey"); } @Test public void testApproxPercentile() throws Exception { MaterializedResult raw = computeActual("SELECT orderstatus, orderkey, totalprice FROM ORDERS"); Multimap<String, Long> orderKeyByStatus = ArrayListMultimap.create(); Multimap<String, Double> totalPriceByStatus = ArrayListMultimap.create(); for (MaterializedTuple tuple : raw.getMaterializedTuples()) { orderKeyByStatus.put((String) tuple.getField(0), (Long) tuple.getField(1)); totalPriceByStatus.put((String) tuple.getField(0), (Double) tuple.getField(2)); } MaterializedResult actual = computeActual("" + "SELECT orderstatus, " + " approx_percentile(orderkey, 0.5), " + " approx_percentile(totalprice, 0.5)," + " approx_percentile(orderkey, 2, 0.5)," + " approx_percentile(totalprice, 2, 0.5)\n" + "FROM ORDERS\n" + "GROUP BY orderstatus"); for (MaterializedTuple tuple : actual.getMaterializedTuples()) { String status = (String) tuple.getField(0); Long orderKey = (Long) tuple.getField(1); Double totalPrice = (Double) tuple.getField(2); Long orderKeyWeighted = (Long) tuple.getField(3); Double totalPriceWeighted = (Double) tuple.getField(4); List<Long> orderKeys = Ordering.natural().sortedCopy(orderKeyByStatus.get(status)); List<Double> totalPrices = Ordering.natural().sortedCopy(totalPriceByStatus.get(status)); // verify real rank of returned value is within 1% of requested rank assertTrue(orderKey >= orderKeys.get((int) (0.49 * orderKeys.size()))); assertTrue(orderKey <= orderKeys.get((int) (0.51 * orderKeys.size()))); assertTrue(orderKeyWeighted >= orderKeys.get((int) (0.49 * orderKeys.size()))); assertTrue(orderKeyWeighted <= orderKeys.get((int) (0.51 * orderKeys.size()))); assertTrue(totalPrice >= totalPrices.get((int) (0.49 * totalPrices.size()))); assertTrue(totalPrice <= totalPrices.get((int) (0.51 * totalPrices.size()))); assertTrue(totalPriceWeighted >= totalPrices.get((int) (0.49 * totalPrices.size()))); assertTrue(totalPriceWeighted <= totalPrices.get((int) (0.51 * totalPrices.size()))); } } @Test public void testComplexQuery() throws Exception { MaterializedResult actual = computeActual("SELECT sum(orderkey), row_number() OVER (ORDER BY orderkey)\n" + "FROM orders\n" + "WHERE orderkey <= 10\n" + "GROUP BY orderkey\n" + "HAVING sum(orderkey) >= 3\n" + "ORDER BY orderkey DESC\n" + "LIMIT 3"); MaterializedResult expected = resultBuilder(FIXED_INT_64, FIXED_INT_64) .row(7, 5) .row(6, 4) .row(5, 3) .build(); assertEquals(actual, expected); } @Test public void testWhereNull() throws Exception { // This query is has this strange shape to force the compiler to leave a true on the stack // with the null flag set so if the filter method is not handling nulls correctly, this // query will fail assertQuery("SELECT custkey FROM orders WHERE custkey = custkey AND cast(nullif(custkey, custkey) as boolean) AND cast(nullif(custkey, custkey) as boolean)"); } @Test public void testSumOfNulls() throws Exception { assertQuery("SELECT orderstatus, sum(CAST(NULL AS BIGINT)) FROM orders GROUP BY orderstatus"); } @Test public void testApproximateCountDistinct() throws Exception { MaterializedResult 
actual = computeActual("SELECT approx_distinct(custkey) FROM orders"); MaterializedResult expected = resultBuilder(FIXED_INT_64) .row(971) .build(); assertEqualsIgnoreOrder(actual.getMaterializedTuples(), expected.getMaterializedTuples()); } @Test public void testApproximateCountDistinctGroupBy() throws Exception { MaterializedResult actual = computeActual("SELECT orderstatus, approx_distinct(custkey) FROM orders GROUP BY orderstatus"); MaterializedResult expected = resultBuilder(actual.getTupleInfo()) .row("O", 969) .row("F", 964) .row("P", 301) .build(); assertEqualsIgnoreOrder(actual.getMaterializedTuples(), expected.getMaterializedTuples()); } @Test public void testCountBoolean() throws Exception { assertQuery("SELECT COUNT(true) FROM orders"); } @Test public void testJoinWithMultiFieldGroupBy() throws Exception { assertQuery("SELECT orderstatus FROM lineitem JOIN (SELECT DISTINCT orderkey, orderstatus FROM ORDERS) T on lineitem.orderkey = T.orderkey"); } @Test public void testGroupByRepeatedField() throws Exception { assertQuery("SELECT sum(custkey) FROM orders GROUP BY orderstatus, orderstatus"); } @Test public void testGroupByRepeatedField2() throws Exception { assertQuery("SELECT count(*) FROM (select orderstatus a, orderstatus b FROM orders) GROUP BY a, b"); } @Test public void testGroupByMultipleFieldsWithPredicateOnAggregationArgument() throws Exception { assertQuery("SELECT custkey, orderstatus, MAX(orderkey) FROM ORDERS WHERE orderkey = 1 GROUP BY custkey, orderstatus"); } @Test public void testReorderOutputsOfGroupByAggregation() throws Exception { assertQuery( "SELECT orderstatus, a, custkey, b FROM (SELECT custkey, orderstatus, -COUNT(*) a, MAX(orderkey) b FROM ORDERS WHERE orderkey = 1 GROUP BY custkey, orderstatus) T"); } @Test public void testGroupAggregationOverNestedGroupByAggregation() throws Exception { assertQuery("SELECT sum(custkey), max(orderstatus), min(c) FROM (SELECT orderstatus, custkey, COUNT(*) c FROM ORDERS GROUP BY orderstatus, custkey) T"); } @Test public void testDistinctMultipleFields() throws Exception { assertQuery("SELECT DISTINCT custkey, orderstatus FROM ORDERS"); } @Test public void testArithmeticNegation() throws Exception { assertQuery("SELECT -custkey FROM orders"); } @Test public void testDistinct() throws Exception { assertQuery("SELECT DISTINCT custkey FROM orders"); } // TODO: we need to properly propagate exceptions with their actual classes @Test(expectedExceptions = Exception.class, expectedExceptionsMessageRegExp = "DISTINCT in aggregation parameters not yet supported") public void testCountDistinct() throws Exception { assertQuery("SELECT COUNT(DISTINCT custkey) FROM orders"); } @Test public void testDistinctWithOrderBy() throws Exception { assertQueryOrdered("SELECT DISTINCT custkey FROM orders ORDER BY custkey LIMIT 10"); } @Test(expectedExceptions = Exception.class, expectedExceptionsMessageRegExp = "For SELECT DISTINCT, ORDER BY expressions must appear in select list") public void testDistinctWithOrderByNotInSelect() throws Exception { assertQueryOrdered("SELECT DISTINCT custkey FROM orders ORDER BY orderkey LIMIT 10"); } @Test public void testOrderByLimit() throws Exception { assertQueryOrdered("SELECT custkey, orderstatus FROM ORDERS ORDER BY orderkey DESC LIMIT 10"); } @Test public void testOrderByExpressionWithLimit() throws Exception { assertQueryOrdered("SELECT custkey, orderstatus FROM ORDERS ORDER BY orderkey + 1 DESC LIMIT 10"); } @Test public void testGroupByOrderByLimit() throws Exception { assertQueryOrdered("SELECT 
custkey, SUM(totalprice) FROM ORDERS GROUP BY custkey ORDER BY SUM(totalprice) DESC LIMIT 10"); } @Test public void testLimitZero() throws Exception { assertQuery("SELECT custkey, totalprice FROM orders LIMIT 0"); } @Test public void testRepeatedAggregations() throws Exception { assertQuery("SELECT SUM(orderkey), SUM(orderkey) FROM ORDERS"); } @Test public void testRepeatedOutputs() throws Exception { assertQuery("SELECT orderkey a, orderkey b FROM ORDERS WHERE orderstatus = 'F'"); } @Test public void testLimit() throws Exception { MaterializedResult actual = computeActual("SELECT orderkey FROM ORDERS LIMIT 10"); MaterializedResult all = computeExpected("SELECT orderkey FROM ORDERS", actual.getTupleInfo()); assertEquals(actual.getMaterializedTuples().size(), 10); assertTrue(all.getMaterializedTuples().containsAll(actual.getMaterializedTuples())); } @Test public void testAggregationWithLimit() throws Exception { MaterializedResult actual = computeActual("SELECT custkey, SUM(totalprice) FROM ORDERS GROUP BY custkey LIMIT 10"); MaterializedResult all = computeExpected("SELECT custkey, SUM(totalprice) FROM ORDERS GROUP BY custkey", actual.getTupleInfo()); assertEquals(actual.getMaterializedTuples().size(), 10); assertTrue(all.getMaterializedTuples().containsAll(actual.getMaterializedTuples())); } @Test public void testLimitInInlineView() throws Exception { MaterializedResult actual = computeActual("SELECT orderkey FROM (SELECT orderkey FROM ORDERS LIMIT 100) T LIMIT 10"); MaterializedResult all = computeExpected("SELECT orderkey FROM ORDERS", actual.getTupleInfo()); assertEquals(actual.getMaterializedTuples().size(), 10); assertTrue(all.getMaterializedTuples().containsAll(actual.getMaterializedTuples())); } @Test public void testCountAll() throws Exception { assertQuery("SELECT COUNT(*) FROM ORDERS"); } @Test public void testCountColumn() throws Exception { assertQuery("SELECT COUNT(orderkey) FROM ORDERS"); assertQuery("SELECT COUNT(orderstatus) FROM ORDERS"); assertQuery("SELECT COUNT(orderdate) FROM ORDERS"); assertQuery("SELECT COUNT(1) FROM ORDERS"); assertQuery("SELECT COUNT(NULLIF(orderstatus, 'F')) FROM ORDERS"); assertQuery("SELECT COUNT(CAST(NULL AS BIGINT)) FROM ORDERS"); // todo: make COUNT(null) work } @Test public void testWildcard() throws Exception { assertQuery("SELECT * FROM ORDERS"); } @Test public void testMultipleWildcards() throws Exception { assertQuery("SELECT *, 123, * FROM ORDERS"); } @Test public void testMixedWildcards() throws Exception { assertQuery("SELECT *, orders.*, orderkey FROM orders"); } @Test public void testQualifiedWildcardFromAlias() throws Exception { assertQuery("SELECT T.* FROM ORDERS T"); } @Test public void testQualifiedWildcardFromInlineView() throws Exception { assertQuery("SELECT T.* FROM (SELECT orderkey + custkey FROM ORDERS) T"); } @Test public void testQualifiedWildcard() throws Exception { assertQuery("SELECT ORDERS.* FROM ORDERS"); } @Test public void testAverageAll() throws Exception { assertQuery("SELECT AVG(totalprice) FROM ORDERS"); } @Test public void testVariance() throws Exception { // int64 assertQuery("SELECT VAR_SAMP(custkey) FROM ORDERS"); assertQuery("SELECT VAR_SAMP(custkey) FROM (SELECT custkey FROM ORDERS ORDER BY custkey LIMIT 2) T"); assertQuery("SELECT VAR_SAMP(custkey) FROM (SELECT custkey FROM ORDERS ORDER BY custkey LIMIT 1) T"); assertQuery("SELECT VAR_SAMP(custkey) FROM (SELECT custkey FROM ORDERS LIMIT 0) T"); // double assertQuery("SELECT VAR_SAMP(totalprice) FROM ORDERS"); assertQuery("SELECT VAR_SAMP(totalprice) 
FROM (SELECT totalprice FROM ORDERS ORDER BY totalprice LIMIT 2) T"); assertQuery("SELECT VAR_SAMP(totalprice) FROM (SELECT totalprice FROM ORDERS ORDER BY totalprice LIMIT 1) T"); assertQuery("SELECT VAR_SAMP(totalprice) FROM (SELECT totalprice FROM ORDERS LIMIT 0) T"); } @Test public void testVariancePop() throws Exception { // int64 assertQuery("SELECT VAR_POP(custkey) FROM ORDERS"); assertQuery("SELECT VAR_POP(custkey) FROM (SELECT custkey FROM ORDERS ORDER BY custkey LIMIT 2) T"); assertQuery("SELECT VAR_POP(custkey) FROM (SELECT custkey FROM ORDERS ORDER BY custkey LIMIT 1) T"); assertQuery("SELECT VAR_POP(custkey) FROM (SELECT custkey FROM ORDERS LIMIT 0) T"); // double assertQuery("SELECT VAR_POP(totalprice) FROM ORDERS"); assertQuery("SELECT VAR_POP(totalprice) FROM (SELECT totalprice FROM ORDERS ORDER BY totalprice LIMIT 2) T"); assertQuery("SELECT VAR_POP(totalprice) FROM (SELECT totalprice FROM ORDERS ORDER BY totalprice LIMIT 1) T"); assertQuery("SELECT VAR_POP(totalprice) FROM (SELECT totalprice FROM ORDERS LIMIT 0) T"); } @Test public void testStdDev() throws Exception { // int64 assertQuery("SELECT STDDEV_SAMP(custkey) FROM ORDERS"); assertQuery("SELECT STDDEV_SAMP(custkey) FROM (SELECT custkey FROM ORDERS ORDER BY custkey LIMIT 2) T"); assertQuery("SELECT STDDEV_SAMP(custkey) FROM (SELECT custkey FROM ORDERS ORDER BY custkey LIMIT 1) T"); assertQuery("SELECT STDDEV_SAMP(custkey) FROM (SELECT custkey FROM ORDERS LIMIT 0) T"); // double assertQuery("SELECT STDDEV_SAMP(totalprice) FROM ORDERS"); assertQuery("SELECT STDDEV_SAMP(totalprice) FROM (SELECT totalprice FROM ORDERS ORDER BY totalprice LIMIT 2) T"); assertQuery("SELECT STDDEV_SAMP(totalprice) FROM (SELECT totalprice FROM ORDERS ORDER BY totalprice LIMIT 1) T"); assertQuery("SELECT STDDEV_SAMP(totalprice) FROM (SELECT totalprice FROM ORDERS LIMIT 0) T"); } @Test public void testStdDevPop() throws Exception { // int64 assertQuery("SELECT STDDEV_POP(custkey) FROM ORDERS"); assertQuery("SELECT STDDEV_POP(custkey) FROM (SELECT custkey FROM ORDERS ORDER BY custkey LIMIT 2) T"); assertQuery("SELECT STDDEV_POP(custkey) FROM (SELECT custkey FROM ORDERS ORDER BY custkey LIMIT 1) T"); assertQuery("SELECT STDDEV_POP(custkey) FROM (SELECT custkey FROM ORDERS LIMIT 0) T"); // double assertQuery("SELECT STDDEV_POP(totalprice) FROM ORDERS"); assertQuery("SELECT STDDEV_POP(totalprice) FROM (SELECT totalprice FROM ORDERS ORDER BY totalprice LIMIT 2) T"); assertQuery("SELECT STDDEV_POP(totalprice) FROM (SELECT totalprice FROM ORDERS ORDER BY totalprice LIMIT 1) T"); assertQuery("SELECT STDDEV_POP(totalprice) FROM (SELECT totalprice FROM ORDERS LIMIT 0) T"); } @Test public void testCountAllWithPredicate() throws Exception { assertQuery("SELECT COUNT(*) FROM ORDERS WHERE orderstatus = 'F'"); } @Test public void testGroupByNoAggregations() throws Exception { assertQuery("SELECT custkey FROM ORDERS GROUP BY custkey"); } @Test public void testGroupByCount() throws Exception { assertQuery( "SELECT orderstatus, COUNT(*) FROM ORDERS GROUP BY orderstatus", "SELECT orderstatus, CAST(COUNT(*) AS INTEGER) FROM orders GROUP BY orderstatus" ); } @Test public void testGroupByMultipleFields() throws Exception { assertQuery("SELECT custkey, orderstatus, COUNT(*) FROM ORDERS GROUP BY custkey, orderstatus"); } @Test public void testGroupByWithAlias() throws Exception { assertQuery( "SELECT orderdate x, COUNT(*) FROM orders GROUP BY orderdate", "SELECT orderdate x, CAST(COUNT(*) AS INTEGER) FROM orders GROUP BY orderdate" ); } @Test public void 
testGroupBySum() throws Exception { assertQuery("SELECT orderstatus, SUM(totalprice) FROM ORDERS GROUP BY orderstatus"); } @Test public void testGroupByWithWildcard() throws Exception { assertQuery("SELECT * FROM (SELECT orderkey FROM orders) t GROUP BY orderkey"); } @Test public void testCountAllWithComparison() throws Exception { assertQuery("SELECT COUNT(*) FROM lineitem WHERE tax < discount"); } @Test public void testSelectWithComparison() throws Exception { assertQuery("SELECT orderkey FROM lineitem WHERE tax < discount"); } @Test public void testCountWithNotPredicate() throws Exception { assertQuery("SELECT COUNT(*) FROM lineitem WHERE NOT tax < discount"); } @Test public void testCountWithNullPredicate() throws Exception { assertQuery("SELECT COUNT(*) FROM lineitem WHERE NULL"); } @Test public void testCountWithIsNullPredicate() throws Exception { assertQuery( "SELECT COUNT(*) FROM orders WHERE NULLIF(orderstatus, 'F') IS NULL", "SELECT COUNT(*) FROM orders WHERE orderstatus = 'F' " ); } @Test public void testCountWithIsNotNullPredicate() throws Exception { assertQuery( "SELECT COUNT(*) FROM orders WHERE NULLIF(orderstatus, 'F') IS NOT NULL", "SELECT COUNT(*) FROM orders WHERE orderstatus <> 'F' " ); } @Test public void testCountWithNullIfPredicate() throws Exception { assertQuery("SELECT COUNT(*) FROM orders WHERE NULLIF(orderstatus, 'F') = orderstatus "); } @Test public void testCountWithCoalescePredicate() throws Exception { assertQuery( "SELECT COUNT(*) FROM orders WHERE COALESCE(NULLIF(orderstatus, 'F'), 'bar') = 'bar'", "SELECT COUNT(*) FROM orders WHERE orderstatus = 'F'" ); } @Test public void testCountWithAndPredicate() throws Exception { assertQuery("SELECT COUNT(*) FROM lineitem WHERE tax < discount AND tax > 0.01 AND discount < 0.05"); } @Test public void testCountWithOrPredicate() throws Exception { assertQuery("SELECT COUNT(*) FROM lineitem WHERE tax < 0.01 OR discount > 0.05"); } @Test public void testCountWithInlineView() throws Exception { assertQuery("SELECT COUNT(*) FROM (SELECT orderkey FROM lineitem) x"); } @Test public void testNestedCount() throws Exception { assertQuery("SELECT COUNT(*) FROM (SELECT orderkey, COUNT(*) FROM lineitem GROUP BY orderkey) x"); } @Test public void testAggregationWithProjection() throws Exception { assertQuery("SELECT sum(totalprice * 2) - sum(totalprice) FROM orders"); } @Test public void testAggregationWithProjection2() throws Exception { assertQuery("SELECT sum(totalprice * 2) + sum(totalprice * 2) FROM orders"); } @Test public void testInlineView() throws Exception { assertQuery("SELECT orderkey, custkey FROM (SELECT orderkey, custkey FROM ORDERS) U"); } @Test public void testAliasedInInlineView() throws Exception { assertQuery("SELECT x, y FROM (SELECT orderkey x, custkey y FROM ORDERS) U"); } @Test public void testInlineViewWithProjections() throws Exception { assertQuery("SELECT x + 1, y FROM (SELECT orderkey * 10 x, custkey y FROM ORDERS) u"); } @Test public void testGroupByWithoutAggregation() throws Exception { assertQuery("SELECT orderstatus FROM orders GROUP BY orderstatus"); } @Test public void testHistogram() throws Exception { assertQuery("SELECT lines, COUNT(*) FROM (SELECT orderkey, COUNT(*) lines FROM lineitem GROUP BY orderkey) U GROUP BY lines"); } @Test public void testSimpleJoin() throws Exception { assertQuery("SELECT COUNT(*) FROM lineitem JOIN orders ON lineitem.orderkey = orders.orderkey"); } @Test public void testJoinWithRightConstantEquality() throws Exception { assertQuery("SELECT COUNT(*) FROM lineitem 
JOIN orders ON lineitem.orderkey = 2"); } @Test public void testJoinWithLeftConstantEquality() throws Exception { assertQuery("SELECT COUNT(*) FROM lineitem JOIN orders ON orders.orderkey = 2"); } @Test public void testSimpleJoinWithLeftConstantEquality() throws Exception { assertQuery("SELECT COUNT(*) FROM lineitem JOIN orders ON lineitem.orderkey = orders.orderkey AND orders.orderkey = 2"); } @Test public void testSimpleJoinWithRightConstantEquality() throws Exception { assertQuery("SELECT COUNT(*) FROM lineitem JOIN orders ON lineitem.orderkey = orders.orderkey AND lineitem.orderkey = 2"); } @Test public void testJoinDoubleClauseWithLeftOverlap() throws Exception { // Checks to make sure that we properly handle duplicate field references in join clauses assertQuery("SELECT COUNT(*) FROM lineitem JOIN orders ON lineitem.orderkey = orders.orderkey AND lineitem.orderkey = orders.custkey"); } @Test public void testJoinDoubleClauseWithRightOverlap() throws Exception { // Checks to make sure that we properly handle duplicate field references in join clauses assertQuery("SELECT COUNT(*) FROM lineitem JOIN orders ON lineitem.orderkey = orders.orderkey AND orders.orderkey = lineitem.partkey"); } @Test public void testJoinWithAlias() throws Exception { assertQuery("SELECT * FROM (lineitem JOIN orders ON lineitem.orderkey = orders.orderkey) x"); } @Test public void testJoinWithConstantExpression() throws Exception { assertQuery("SELECT COUNT(*) FROM lineitem JOIN orders ON lineitem.orderkey = orders.orderkey AND 123 = 123"); } @Test(expectedExceptions = Exception.class, expectedExceptionsMessageRegExp = ".*not supported.*") public void testJoinOnConstantExpression() throws Exception { assertQuery("SELECT COUNT(*) FROM lineitem JOIN orders ON 123 = 123"); } @Test public void testJoinUsing() throws Exception { assertQuery( "SELECT COUNT(*) FROM lineitem join orders using (orderkey)", "SELECT COUNT(*) FROM lineitem join orders on lineitem.orderkey = orders.orderkey" ); } @Test public void testJoinWithReversedComparison() throws Exception { assertQuery("SELECT COUNT(*) FROM lineitem JOIN orders ON orders.orderkey = lineitem.orderkey"); } @Test public void testJoinWithComplexExpressions() throws Exception { assertQuery("SELECT SUM(custkey) FROM lineitem JOIN orders ON lineitem.orderkey = CAST(orders.orderkey AS BIGINT)"); } @Test public void testJoinWithComplexExpressions2() throws Exception { assertQuery( "SELECT SUM(custkey) FROM lineitem JOIN orders ON lineitem.orderkey = CASE WHEN orders.custkey = 1 and orders.orderstatus = 'F' THEN orders.orderkey ELSE NULL END"); } @Test public void testJoinWithComplexExpressions3() throws Exception { assertQuery( "SELECT SUM(custkey) FROM lineitem JOIN orders ON lineitem.orderkey + 1 = orders.orderkey + 1", "SELECT SUM(custkey) FROM lineitem JOIN orders ON lineitem.orderkey = orders.orderkey " // H2 takes a million years because it can't join efficiently on a non-indexed field/expression ); } @Test public void testSelfJoin() throws Exception { assertQuery("SELECT COUNT(*) FROM orders a JOIN orders b on a.orderkey = b.orderkey"); } @Test public void testWildcardFromJoin() throws Exception { assertQuery( "SELECT * FROM (select orderkey, partkey from lineitem) a join (select orderkey, custkey from orders) b using (orderkey)", "SELECT * FROM (select orderkey, partkey from lineitem) a join (select orderkey, custkey from orders) b on a.orderkey = b.orderkey" ); } @Test public void testQualifiedWildcardFromJoin() throws Exception { assertQuery( "SELECT a.*, b.* FROM 
(select orderkey, partkey from lineitem) a join (select orderkey, custkey from orders) b using (orderkey)", "SELECT a.*, b.* FROM (select orderkey, partkey from lineitem) a join (select orderkey, custkey from orders) b on a.orderkey = b.orderkey" ); } @Test public void testJoinAggregations() throws Exception { assertQuery( "SELECT x + y FROM (" + " SELECT orderdate, COUNT(*) x FROM orders GROUP BY orderdate) a JOIN (" + " SELECT orderdate, COUNT(*) y FROM orders GROUP BY orderdate) b ON a.orderdate = b.orderdate"); } @Test public void testJoinOnMultipleFields() throws Exception { assertQuery("SELECT COUNT(*) FROM lineitem JOIN orders ON lineitem.orderkey = orders.orderkey AND lineitem.shipdate = orders.orderdate"); } @Test public void testJoinUsingMultipleFields() throws Exception { assertQuery( "SELECT COUNT(*) FROM lineitem JOIN (SELECT orderkey, orderdate shipdate FROM ORDERS) T USING (orderkey, shipdate)", "SELECT COUNT(*) FROM lineitem JOIN orders ON lineitem.orderkey = orders.orderkey AND lineitem.shipdate = orders.orderdate" ); } @Test public void testJoinWithNonJoinExpression() throws Exception { assertQuery("SELECT COUNT(*) FROM lineitem JOIN orders ON lineitem.orderkey = orders.orderkey AND orders.custkey = 1"); } @Test public void testLeftFilteredJoin() throws Exception { // Test predicate move around assertQuery("SELECT custkey, linestatus, tax, totalprice, orderstatus FROM (SELECT * FROM lineitem WHERE orderkey % 2 = 0) a JOIN orders ON a.orderkey = orders.orderkey"); } @Test public void testRightFilteredJoin() throws Exception { // Test predicate move around assertQuery("SELECT custkey, linestatus, tax, totalprice, orderstatus FROM lineitem JOIN (SELECT * FROM orders WHERE orderkey % 2 = 0) a ON lineitem.orderkey = a.orderkey"); } @Test public void testJoinWithFullyPushedDownJoinClause() throws Exception { assertQuery("SELECT COUNT(*) FROM lineitem JOIN orders ON orders.custkey = 1 AND lineitem.orderkey = 1"); } @Test public void testJoinPredicateMoveAround() throws Exception { assertQuery("SELECT COUNT(*)\n" + "FROM (SELECT * FROM lineitem WHERE orderkey % 16 = 0 AND partkey % 2 = 0) lineitem\n" + "JOIN (SELECT * FROM orders WHERE orderkey % 16 = 0 AND custkey % 2 = 0) orders\n" + "ON lineitem.orderkey % 8 = orders.orderkey % 8 AND lineitem.linenumber % 2 = 0\n" + "WHERE orders.custkey % 8 < 7 AND orders.custkey % 8 = lineitem.orderkey % 8 AND lineitem.suppkey % 7 > orders.custkey % 7"); } @Test public void testSimpleLeftJoin() throws Exception { assertQuery("SELECT COUNT(*) FROM lineitem LEFT JOIN orders ON lineitem.orderkey = orders.orderkey"); assertQuery("SELECT COUNT(*) FROM lineitem LEFT OUTER JOIN orders ON lineitem.orderkey = orders.orderkey"); } @Test public void testLeftJoinNormalizedToInner() throws Exception { assertQuery("SELECT COUNT(*) FROM lineitem LEFT JOIN orders ON lineitem.orderkey = orders.orderkey WHERE orders.orderkey IS NOT NULL"); } @Test public void testLeftJoinWithRightConstantEquality() throws Exception { assertQuery("SELECT COUNT(*) FROM (SELECT * FROM lineitem WHERE orderkey % 1024 = 0) lineitem LEFT JOIN orders ON lineitem.orderkey = 1024"); } @Test public void testLeftJoinWithLeftConstantEquality() throws Exception { assertQuery("SELECT COUNT(*) FROM (SELECT * FROM lineitem WHERE orderkey % 1024 = 0) lineitem LEFT JOIN orders ON orders.orderkey = 1024"); } @Test public void testSimpleLeftJoinWithLeftConstantEquality() throws Exception { assertQuery("SELECT COUNT(*) FROM lineitem LEFT JOIN orders ON lineitem.orderkey = orders.orderkey AND 
orders.orderkey = 2"); } @Test public void testSimpleLeftJoinWithRightConstantEquality() throws Exception { assertQuery("SELECT COUNT(*) FROM lineitem LEFT JOIN orders ON lineitem.orderkey = orders.orderkey AND lineitem.orderkey = 2"); } @Test public void testDoubleFilteredLeftJoinWithRightConstantEquality() throws Exception { assertQuery("SELECT COUNT(*) FROM (SELECT * FROM lineitem WHERE orderkey % 1024 = 0) lineitem LEFT JOIN (SELECT * FROM orders WHERE orderkey % 1024 = 0) orders ON orders.orderkey = 1024"); } @Test public void testDoubleFilteredLeftJoinWithLeftConstantEquality() throws Exception { assertQuery("SELECT COUNT(*) FROM (SELECT * FROM lineitem WHERE orderkey % 1024 = 0) lineitem LEFT JOIN (SELECT * FROM orders WHERE orderkey % 1024 = 0) orders ON lineitem.orderkey = 1024"); } @Test public void testLeftJoinDoubleClauseWithLeftOverlap() throws Exception { // Checks to make sure that we properly handle duplicate field references in join clauses assertQuery("SELECT COUNT(*) FROM lineitem LEFT JOIN orders ON lineitem.orderkey = orders.orderkey AND lineitem.orderkey = orders.custkey"); } @Test public void testLeftJoinDoubleClauseWithRightOverlap() throws Exception { // Checks to make sure that we properly handle duplicate field references in join clauses assertQuery("SELECT COUNT(*) FROM lineitem LEFT JOIN orders ON lineitem.orderkey = orders.orderkey AND orders.orderkey = lineitem.partkey"); } @Test public void testBuildFilteredLeftJoin() throws Exception { assertQuery("SELECT * FROM lineitem LEFT JOIN (SELECT * FROM orders WHERE orderkey % 2 = 0) a ON lineitem.orderkey = a.orderkey"); } @Test public void testProbeFilteredLeftJoin() throws Exception { assertQuery("SELECT * FROM (SELECT * FROM lineitem WHERE orderkey % 2 = 0) a LEFT JOIN orders ON a.orderkey = orders.orderkey"); } @Test public void testLeftJoinPredicateMoveAround() throws Exception { assertQuery("SELECT COUNT(*)\n" + "FROM (SELECT * FROM lineitem WHERE orderkey % 16 = 0 AND partkey % 2 = 0) lineitem\n" + "LEFT JOIN (SELECT * FROM orders WHERE orderkey % 16 = 0 AND custkey % 2 = 0) orders\n" + "ON lineitem.orderkey % 8 = orders.orderkey % 8\n" + "WHERE (orders.custkey % 8 < 7 OR orders.custkey % 8 IS NULL) AND orders.custkey % 8 = lineitem.orderkey % 8"); } @Test public void testLeftJoinEqualityInference() throws Exception { // Test that we can infer orders.orderkey % 4 = orders.custkey % 3 on the inner side assertQuery("SELECT COUNT(*)\n" + "FROM (SELECT * FROM lineitem WHERE orderkey % 4 = 0 AND suppkey % 2 = partkey % 2 AND linenumber % 3 = orderkey % 3) lineitem\n" + "LEFT JOIN (SELECT * FROM orders WHERE orderkey % 4 = 0) orders\n" + "ON lineitem.linenumber % 3 = orders.orderkey % 4 AND lineitem.orderkey % 3 = orders.custkey % 3\n" + "WHERE lineitem.suppkey % 2 = lineitem.linenumber % 3"); } @Test public void testSimpleRightJoin() throws Exception { assertQuery("SELECT COUNT(*) FROM lineitem RIGHT JOIN orders ON lineitem.orderkey = orders.orderkey"); assertQuery("SELECT COUNT(*) FROM lineitem RIGHT OUTER JOIN orders ON lineitem.orderkey = orders.orderkey"); } @Test public void testRightJoinNormalizedToInner() throws Exception { assertQuery("SELECT COUNT(*) FROM lineitem RIGHT JOIN orders ON lineitem.orderkey = orders.orderkey WHERE lineitem.orderkey IS NOT NULL"); } @Test public void testRightJoinWithRightConstantEquality() throws Exception { assertQuery("SELECT COUNT(*) FROM (SELECT * FROM lineitem WHERE orderkey % 1024 = 0) lineitem RIGHT JOIN orders ON lineitem.orderkey = 1024"); } @Test public void 
testRightJoinWithLeftConstantEquality() throws Exception { assertQuery("SELECT COUNT(*) FROM (SELECT * FROM lineitem WHERE orderkey % 1024 = 0) lineitem RIGHT JOIN orders ON orders.orderkey = 1024"); } @Test public void testDoubleFilteredRightJoinWithRightConstantEquality() throws Exception { assertQuery("SELECT COUNT(*) FROM (SELECT * FROM lineitem WHERE orderkey % 1024 = 0) lineitem RIGHT JOIN (SELECT * FROM orders WHERE orderkey % 1024 = 0) orders ON orders.orderkey = 1024"); } @Test public void testDoubleFilteredRightJoinWithLeftConstantEquality() throws Exception { assertQuery("SELECT COUNT(*) FROM (SELECT * FROM lineitem WHERE orderkey % 1024 = 0) lineitem RIGHT JOIN (SELECT * FROM orders WHERE orderkey % 1024 = 0) orders ON lineitem.orderkey = 1024"); } @Test public void testSimpleRightJoinWithLeftConstantEquality() throws Exception { assertQuery("SELECT COUNT(*) FROM lineitem RIGHT JOIN orders ON lineitem.orderkey = orders.orderkey AND orders.orderkey = 2"); } @Test public void testSimpleRightJoinWithRightConstantEquality() throws Exception { assertQuery("SELECT COUNT(*) FROM lineitem RIGHT JOIN orders ON lineitem.orderkey = orders.orderkey AND lineitem.orderkey = 2"); } @Test public void testRightJoinDoubleClauseWithLeftOverlap() throws Exception { // Checks to make sure that we properly handle duplicate field references in join clauses assertQuery("SELECT COUNT(*) FROM lineitem RIGHT JOIN orders ON lineitem.orderkey = orders.orderkey AND lineitem.orderkey = orders.custkey"); } @Test public void testRightJoinDoubleClauseWithRightOverlap() throws Exception { // Checks to make sure that we properly handle duplicate field references in join clauses assertQuery("SELECT COUNT(*) FROM lineitem RIGHT JOIN orders ON lineitem.orderkey = orders.orderkey AND orders.orderkey = lineitem.partkey"); } @Test public void testBuildFilteredRightJoin() throws Exception { assertQuery("SELECT custkey, linestatus, tax, totalprice, orderstatus FROM (SELECT * FROM lineitem WHERE orderkey % 2 = 0) a RIGHT JOIN orders ON a.orderkey = orders.orderkey"); } @Test public void testProbeFilteredRightJoin() throws Exception { assertQuery("SELECT custkey, linestatus, tax, totalprice, orderstatus FROM lineitem RIGHT JOIN (SELECT * FROM orders WHERE orderkey % 2 = 0) a ON lineitem.orderkey = a.orderkey"); } @Test public void testRightJoinPredicateMoveAround() throws Exception { assertQuery("SELECT COUNT(*)\n" + "FROM (SELECT * FROM orders WHERE orderkey % 16 = 0 AND custkey % 2 = 0) orders\n" + "RIGHT JOIN (SELECT * FROM lineitem WHERE orderkey % 16 = 0 AND partkey % 2 = 0) lineitem\n" + "ON lineitem.orderkey % 8 = orders.orderkey % 8\n" + "WHERE (orders.custkey % 8 < 7 OR orders.custkey % 8 IS NULL) AND orders.custkey % 8 = lineitem.orderkey % 8"); } @Test public void testRightJoinEqualityInference() throws Exception { // Test that we can infer orders.orderkey % 4 = orders.custkey % 3 on the inner side assertQuery("SELECT COUNT(*)\n" + "FROM (SELECT * FROM orders WHERE orderkey % 4 = 0) orders\n" + "RIGHT JOIN (SELECT * FROM lineitem WHERE orderkey % 4 = 0 AND suppkey % 2 = partkey % 2 AND linenumber % 3 = orderkey % 3) lineitem\n" + "ON lineitem.linenumber % 3 = orders.orderkey % 4 AND lineitem.orderkey % 3 = orders.custkey % 3\n" + "WHERE lineitem.suppkey % 2 = lineitem.linenumber % 3"); } @Test public void testOrderBy() throws Exception { assertQueryOrdered("SELECT orderstatus FROM orders ORDER BY orderstatus"); } @Test public void testOrderBy2() throws Exception { assertQueryOrdered("SELECT orderstatus FROM 
orders ORDER BY orderkey DESC"); } @Test public void testOrderByMultipleFields() throws Exception { assertQueryOrdered("SELECT custkey, orderstatus FROM orders ORDER BY custkey DESC, orderstatus"); } @Test public void testOrderByAlias() throws Exception { assertQueryOrdered("SELECT orderstatus x FROM orders ORDER BY x ASC"); } @Test public void testOrderByAliasWithSameNameAsUnselectedColumn() throws Exception { assertQueryOrdered("SELECT orderstatus orderdate FROM orders ORDER BY orderdate ASC"); } @Test public void testOrderByOrdinal() throws Exception { assertQueryOrdered("SELECT orderstatus, orderdate FROM orders ORDER BY 2, 1"); } @Test public void testOrderByOrdinalWithWildcard() throws Exception { assertQueryOrdered("SELECT * FROM orders ORDER BY 1"); } @Test public void testGroupByOrdinal() throws Exception { assertQuery( "SELECT orderstatus, sum(totalprice) FROM orders GROUP BY 1", "SELECT orderstatus, sum(totalprice) FROM orders GROUP BY orderstatus"); } @Test public void testGroupBySearchedCase() throws Exception { assertQuery("SELECT CASE WHEN orderstatus = 'O' THEN 'a' ELSE 'b' END, count(*)\n" + "FROM orders\n" + "GROUP BY CASE WHEN orderstatus = 'O' THEN 'a' ELSE 'b' END"); assertQuery( "SELECT CASE WHEN orderstatus = 'O' THEN 'a' ELSE 'b' END, count(*)\n" + "FROM orders\n" + "GROUP BY 1", "SELECT CASE WHEN orderstatus = 'O' THEN 'a' ELSE 'b' END, count(*)\n" + "FROM orders\n" + "GROUP BY CASE WHEN orderstatus = 'O' THEN 'a' ELSE 'b' END"); } @Test public void testGroupBySearchedCaseNoElse() throws Exception { // whole CASE in group by clause assertQuery("SELECT CASE WHEN orderstatus = 'O' THEN 'a' END, count(*)\n" + "FROM orders\n" + "GROUP BY CASE WHEN orderstatus = 'O' THEN 'a' END"); assertQuery( "SELECT CASE WHEN orderstatus = 'O' THEN 'a' END, count(*)\n" + "FROM orders\n" + "GROUP BY 1", "SELECT CASE WHEN orderstatus = 'O' THEN 'a' END, count(*)\n" + "FROM orders\n" + "GROUP BY CASE WHEN orderstatus = 'O' THEN 'a' END"); assertQuery("SELECT CASE WHEN true THEN orderstatus END, count(*)\n" + "FROM orders\n" + "GROUP BY orderstatus"); } @Test public void testGroupByCase() throws Exception { // whole CASE in group by clause assertQuery("SELECT CASE orderstatus WHEN 'O' THEN 'a' ELSE 'b' END, count(*)\n" + "FROM orders\n" + "GROUP BY CASE orderstatus WHEN 'O' THEN 'a' ELSE 'b' END"); assertQuery( "SELECT CASE orderstatus WHEN 'O' THEN 'a' ELSE 'b' END, count(*)\n" + "FROM orders\n" + "GROUP BY 1", "SELECT CASE orderstatus WHEN 'O' THEN 'a' ELSE 'b' END, count(*)\n" + "FROM orders\n" + "GROUP BY CASE orderstatus WHEN 'O' THEN 'a' ELSE 'b' END"); // operand in group by clause assertQuery("SELECT CASE orderstatus WHEN 'O' THEN 'a' ELSE 'b' END, count(*)\n" + "FROM orders\n" + "GROUP BY orderstatus"); // condition in group by clause assertQuery("SELECT CASE 'O' WHEN orderstatus THEN 'a' ELSE 'b' END, count(*)\n" + "FROM orders\n" + "GROUP BY orderstatus"); // 'then' in group by clause assertQuery("SELECT CASE 1 WHEN 1 THEN orderstatus ELSE 'x' END, count(*)\n" + "FROM orders\n" + "GROUP BY orderstatus"); // 'else' in group by clause assertQuery("SELECT CASE 1 WHEN 1 THEN 'x' ELSE orderstatus END, count(*)\n" + "FROM orders\n" + "GROUP BY orderstatus"); } @Test public void testGroupByCaseNoElse() throws Exception { // whole CASE in group by clause assertQuery("SELECT CASE orderstatus WHEN 'O' THEN 'a' END, count(*)\n" + "FROM orders\n" + "GROUP BY CASE orderstatus WHEN 'O' THEN 'a' END"); // operand in group by clause assertQuery("SELECT CASE orderstatus WHEN 'O' THEN 'a' END, 
count(*)\n" + "FROM orders\n" + "GROUP BY orderstatus"); // condition in group by clause assertQuery("SELECT CASE 'O' WHEN orderstatus THEN 'a' END, count(*)\n" + "FROM orders\n" + "GROUP BY orderstatus"); // 'then' in group by clause assertQuery("SELECT CASE 1 WHEN 1 THEN orderstatus END, count(*)\n" + "FROM orders\n" + "GROUP BY orderstatus"); } @Test public void testGroupByCast() throws Exception { // whole CAST in group by expression assertQuery("SELECT CAST(orderkey AS VARCHAR), count(*) FROM orders GROUP BY CAST(orderkey AS VARCHAR)"); assertQuery( "SELECT CAST(orderkey AS VARCHAR), count(*) FROM orders GROUP BY 1", "SELECT CAST(orderkey AS VARCHAR), count(*) FROM orders GROUP BY CAST(orderkey AS VARCHAR)"); // argument in group by expression assertQuery("SELECT CAST(orderkey AS VARCHAR), count(*) FROM orders GROUP BY orderkey"); } @Test public void testGroupByCoalesce() throws Exception { // whole COALESCE in group by assertQuery("SELECT COALESCE(orderkey, custkey), count(*) FROM orders GROUP BY COALESCE(orderkey, custkey)"); assertQuery( "SELECT COALESCE(orderkey, custkey), count(*) FROM orders GROUP BY 1", "SELECT COALESCE(orderkey, custkey), count(*) FROM orders GROUP BY COALESCE(orderkey, custkey)" ); // operands in group by assertQuery("SELECT COALESCE(orderkey, 1), count(*) FROM orders GROUP BY orderkey"); // operands in group by assertQuery("SELECT COALESCE(1, orderkey), count(*) FROM orders GROUP BY orderkey"); } @Test public void testGroupByNullIf() throws Exception { // whole NULLIF in group by assertQuery("SELECT NULLIF(orderkey, custkey), count(*) FROM orders GROUP BY NULLIF(orderkey, custkey)"); assertQuery( "SELECT NULLIF(orderkey, custkey), count(*) FROM orders GROUP BY 1", "SELECT NULLIF(orderkey, custkey), count(*) FROM orders GROUP BY NULLIF(orderkey, custkey)"); // first operand in group by assertQuery("SELECT NULLIF(orderkey, 1), count(*) FROM orders GROUP BY orderkey"); // second operand in group by assertQuery("SELECT NULLIF(1, orderkey), count(*) FROM orders GROUP BY orderkey"); } @Test public void testGroupByExtract() throws Exception { // whole expression in group by assertQuery("SELECT EXTRACT(YEAR FROM now()), count(*) FROM orders GROUP BY EXTRACT(YEAR FROM now())"); assertQuery( "SELECT EXTRACT(YEAR FROM now()), count(*) FROM orders GROUP BY 1", "SELECT EXTRACT(YEAR FROM now()), count(*) FROM orders GROUP BY EXTRACT(YEAR FROM now())"); // argument in group by assertQuery("SELECT EXTRACT(YEAR FROM now()), count(*) FROM orders GROUP BY now()"); } @Test public void testGroupByBetween() throws Exception { // whole expression in group by assertQuery("SELECT orderkey BETWEEN 1 AND 100 FROM orders GROUP BY orderkey BETWEEN 1 AND 100 "); // expression in group by assertQuery("SELECT CAST(orderkey BETWEEN 1 AND 100 AS BIGINT) FROM orders GROUP BY orderkey"); // min in group by assertQuery("SELECT CAST(50 BETWEEN orderkey AND 100 AS BIGINT) FROM orders GROUP BY orderkey"); // max in group by assertQuery("SELECT CAST(50 BETWEEN 1 AND orderkey AS BIGINT) FROM orders GROUP BY orderkey"); } @Test public void testHaving() throws Exception { assertQuery("SELECT orderstatus, sum(totalprice) FROM orders GROUP BY orderstatus HAVING orderstatus = 'O'"); } @Test public void testHaving2() throws Exception { assertQuery("SELECT custkey, sum(orderkey) FROM orders GROUP BY custkey HAVING sum(orderkey) > 400000"); } @Test public void testHaving3() throws Exception { assertQuery("SELECT custkey, sum(totalprice) * 2 FROM orders GROUP BY custkey HAVING avg(totalprice + 5) > 10"); } 
@Test public void testColumnAliases() throws Exception { assertQuery( "SELECT x, T.y, z + 1 FROM (SELECT custkey, orderstatus, totalprice FROM orders) T (x, y, z)", "SELECT custkey, orderstatus, totalprice + 1 FROM orders"); } @Test public void testSameInputToAggregates() throws Exception { assertQuery("SELECT max(a), max(b) FROM (SELECT custkey a, custkey b FROM orders) x"); } @SuppressWarnings("PointlessArithmeticExpression") @Test public void testWindowFunctionsExpressions() { MaterializedResult actual = computeActual("" + "SELECT orderkey, orderstatus\n" + ", row_number() OVER (ORDER BY orderkey * 2) *\n" + " row_number() OVER (ORDER BY orderkey DESC) + 100\n" + "FROM (SELECT * FROM orders ORDER BY orderkey LIMIT 10) x\n" + "ORDER BY orderkey LIMIT 5"); MaterializedResult expected = resultBuilder(FIXED_INT_64, VARIABLE_BINARY, FIXED_INT_64) .row(1, "O", (1 * 10) + 100) .row(2, "O", (2 * 9) + 100) .row(3, "F", (3 * 8) + 100) .row(4, "O", (4 * 7) + 100) .row(5, "F", (5 * 6) + 100) .build(); assertEquals(actual, expected); } @Test public void testWindowFunctionsFromAggregate() throws Exception { MaterializedResult actual = computeActual("" + "SELECT * FROM (\n" + " SELECT orderstatus, clerk, sales\n" + " , rank() OVER (PARTITION BY x.orderstatus ORDER BY sales DESC) rnk\n" + " FROM (\n" + " SELECT orderstatus, clerk, sum(totalprice) sales\n" + " FROM orders\n" + " GROUP BY orderstatus, clerk\n" + " ) x\n" + ") x\n" + "WHERE rnk <= 2\n" + "ORDER BY orderstatus, rnk"); MaterializedResult expected = resultBuilder(VARIABLE_BINARY, VARIABLE_BINARY, DOUBLE, FIXED_INT_64) .row("F", "Clerk#000000090", 2784836.61, 1) .row("F", "Clerk#000000084", 2674447.15, 2) .row("O", "Clerk#000000500", 2569878.29, 1) .row("O", "Clerk#000000050", 2500162.92, 2) .row("P", "Clerk#000000071", 841820.99, 1) .row("P", "Clerk#000001000", 643679.49, 2) .build(); assertEquals(actual, expected); } @Test public void testOrderByWindowFunction() throws Exception { MaterializedResult actual = computeActual("" + "SELECT orderkey, row_number() OVER (ORDER BY orderkey)\n" + "FROM (SELECT * FROM orders ORDER BY orderkey LIMIT 10)\n" + "ORDER BY 2 DESC\n" + "LIMIT 5"); MaterializedResult expected = resultBuilder(FIXED_INT_64, FIXED_INT_64) .row(34, 10) .row(33, 9) .row(32, 8) .row(7, 7) .row(6, 6) .build(); assertEquals(actual, expected); } @Test public void testScalarFunction() throws Exception { assertQuery("SELECT SUBSTR('Quadratically', 5, 6) FROM orders LIMIT 1"); } @Test public void testCast() throws Exception { assertQuery("SELECT CAST('1' AS BIGINT) FROM orders"); assertQuery("SELECT CAST(totalprice AS BIGINT) FROM orders"); assertQuery("SELECT CAST(orderkey AS DOUBLE) FROM orders"); assertQuery("SELECT CAST(orderkey AS VARCHAR) FROM orders"); assertQuery("SELECT CAST(orderkey AS BOOLEAN) FROM orders"); } @Test public void testConcatOperator() throws Exception { assertQuery("SELECT '12' || '34' FROM orders LIMIT 1"); } @Test public void testQuotedIdentifiers() throws Exception { assertQuery("SELECT \"TOTALPRICE\" \"my price\" FROM \"ORDERS\""); } @Test(expectedExceptions = Exception.class, expectedExceptionsMessageRegExp = ".*orderkey_1.*") public void testInvalidColumn() throws Exception { computeActual("select * from lineitem l join (select orderkey_1, custkey from orders) o on l.orderkey = o.orderkey_1"); } @Test public void testUnaliasedSubqueries() throws Exception { assertQuery("SELECT orderkey FROM (SELECT orderkey FROM orders)"); } @Test public void testUnaliasedSubqueries1() throws Exception { assertQuery("SELECT 
a FROM (SELECT orderkey a FROM orders)"); } @Test public void testJoinUnaliasedSubqueries() throws Exception { assertQuery( "SELECT COUNT(*) FROM (SELECT * FROM lineitem) join (SELECT * FROM orders) using (orderkey)", "SELECT COUNT(*) FROM lineitem join orders on lineitem.orderkey = orders.orderkey" ); } @Test public void testWith() throws Exception { assertQuery("" + "WITH a AS (SELECT * FROM orders) " + "SELECT * FROM a", "SELECT * FROM orders"); } @Test public void testWithQualifiedPrefix() throws Exception { assertQuery("" + "WITH a AS (SELECT 123 FROM orders LIMIT 1)" + "SELECT a.* FROM a", "SELECT 123 FROM orders LIMIT 1"); } @Test public void testWithAliased() throws Exception { assertQuery("" + "WITH a AS (SELECT * FROM orders) " + "SELECT * FROM a x", "SELECT * FROM orders"); } @Test public void testReferenceToWithQueryInFromClause() throws Exception { assertQuery( "WITH a AS (SELECT * FROM orders)" + "SELECT * FROM (" + " SELECT * FROM a" + ")", "SELECT * FROM orders"); } @Test public void testWithChaining() throws Exception { assertQuery("" + "WITH a AS (SELECT orderkey n FROM orders)\n" + ", b AS (SELECT n + 1 n FROM a)\n" + ", c AS (SELECT n + 1 n FROM b)\n" + "SELECT n + 1 FROM c", "SELECT orderkey + 3 FROM orders"); } @Test public void testWithSelfJoin() throws Exception { assertQuery("" + "WITH x AS (SELECT DISTINCT orderkey FROM orders ORDER BY orderkey LIMIT 10)\n" + "SELECT count(*) FROM x a JOIN x b USING (orderkey)", "" + "SELECT count(*)\n" + "FROM (SELECT DISTINCT orderkey FROM orders ORDER BY orderkey LIMIT 10) a\n" + "JOIN (SELECT DISTINCT orderkey FROM orders ORDER BY orderkey LIMIT 10) b ON a.orderkey = b.orderkey"); } @Test public void testWithNestedSubqueries() throws Exception { assertQuery("" + "WITH a AS (\n" + " WITH aa AS (SELECT 123 x FROM orders LIMIT 1)\n" + " SELECT x y FROM aa\n" + "), b AS (\n" + " WITH bb AS (\n" + " WITH bbb AS (SELECT y FROM a)\n" + " SELECT bbb.* FROM bbb\n" + " )\n" + " SELECT y z FROM bb\n" + ")\n" + "SELECT *\n" + "FROM (\n" + " WITH q AS (SELECT z w FROM b)\n" + " SELECT j.*, k.*\n" + " FROM a j\n" + " JOIN q k ON (j.y = k.w)\n" + ") t", "" + "SELECT 123, 123 FROM orders LIMIT 1"); } @Test(enabled = false) public void testWithColumnAliasing() throws Exception { assertQuery( "WITH a (id) AS (SELECT 123 FROM orders LIMIT 1) SELECT * FROM a", "SELECT 123 FROM orders LIMIT 1"); } @Test public void testWithHiding() throws Exception { assertQuery("" + "WITH a AS (SELECT custkey FROM orders), " + " b AS (" + " WITH a AS (SELECT orderkey FROM orders)" + " SELECT * FROM a" + // should refer to inner 'a' " )" + "SELECT * FROM b", "SELECT orderkey FROM orders" ); } @Test(expectedExceptions = RuntimeException.class, expectedExceptionsMessageRegExp = "Recursive WITH queries are not supported") public void testWithRecursive() throws Exception { computeActual("WITH RECURSIVE a AS (SELECT 123 FROM dual) SELECT * FROM a"); } @Test public void testCaseNoElse() throws Exception { assertQuery("SELECT orderkey, CASE orderstatus WHEN 'O' THEN 'a' END FROM orders"); } @Test public void testIfExpression() throws Exception { assertQuery( "SELECT sum(IF(orderstatus = 'F', totalprice, 0.0)) FROM orders", "SELECT sum(CASE WHEN orderstatus = 'F' THEN totalprice ELSE 0.0 END) FROM orders"); assertQuery( "SELECT sum(IF(orderstatus = 'Z', totalprice)) FROM orders", "SELECT sum(CASE WHEN orderstatus = 'Z' THEN totalprice END) FROM orders"); assertQuery( "SELECT sum(IF(orderstatus = 'F', NULL, totalprice)) FROM orders", "SELECT sum(CASE WHEN orderstatus = 'F' 
THEN NULL ELSE totalprice END) FROM orders"); assertQuery( "SELECT IF(orderstatus = 'Z', orderkey / 0, orderkey) FROM orders", "SELECT CASE WHEN orderstatus = 'Z' THEN orderkey / 0 ELSE orderkey END FROM orders"); assertQuery( "SELECT sum(IF(NULLIF(orderstatus, 'F') <> 'F', totalprice, 5.1)) FROM orders", "SELECT sum(CASE WHEN NULLIF(orderstatus, 'F') <> 'F' THEN totalprice ELSE 5.1 END) FROM orders"); } @Test public void testIn() throws Exception { assertQuery("SELECT orderkey FROM orders WHERE orderkey IN (1, 2, 3)"); assertQuery("SELECT orderkey FROM orders WHERE orderkey IN (1.5, 2.3)"); assertQuery("SELECT orderkey FROM orders WHERE totalprice IN (1, 2, 3)"); } @Test public void testGroupByIf() throws Exception { assertQuery( "SELECT IF(orderkey between 1 and 5, 'orders', 'others'), sum(totalprice) FROM orders GROUP BY 1", "SELECT CASE WHEN orderkey BETWEEN 1 AND 5 THEN 'orders' ELSE 'others' END, sum(totalprice)\n" + "FROM orders\n" + "GROUP BY CASE WHEN orderkey BETWEEN 1 AND 5 THEN 'orders' ELSE 'others' END"); } @Test public void testDuplicateFields() throws Exception { assertQuery( "SELECT * FROM (SELECT orderkey, orderkey FROM orders)", "SELECT orderkey, orderkey FROM orders"); } @Test public void testWildcardFromSubquery() throws Exception { assertQuery("SELECT * FROM (SELECT orderkey X FROM orders)"); } @Test public void testCaseInsensitiveOutputAliasInOrderBy() throws Exception { assertQueryOrdered("SELECT orderkey X FROM orders ORDER BY x"); } @Test public void testCaseInsensitiveAttribute() throws Exception { assertQuery("SELECT x FROM (SELECT orderkey X FROM orders)"); } @Test public void testCaseInsensitiveAliasedRelation() throws Exception { assertQuery("SELECT A.* FROM orders a"); } @Test public void testSubqueryBody() throws Exception { assertQuery("(SELECT orderkey, custkey FROM ORDERS)"); } @Test public void testSubqueryBodyOrderLimit() throws Exception { assertQueryOrdered("(SELECT orderkey AS a, custkey AS b FROM ORDERS) ORDER BY a LIMIT 1"); } @Test public void testSubqueryBodyProjectedOrderby() throws Exception { assertQueryOrdered("(SELECT orderkey, custkey FROM ORDERS) ORDER BY orderkey * -1"); } @Test public void testSubqueryBodyDoubleOrderby() throws Exception { assertQueryOrdered("(SELECT orderkey, custkey FROM ORDERS ORDER BY custkey) ORDER BY orderkey"); } @Test public void testNodeRoster() throws Exception { List<MaterializedTuple> result = computeActual("SELECT * FROM sys.node").getMaterializedTuples(); assertEquals(result.size(), getNodeCount()); } @Test public void testDual() throws Exception { MaterializedResult result = computeActual("SELECT * FROM dual"); List<MaterializedTuple> tuples = result.getMaterializedTuples(); assertEquals(tuples.size(), 1); } @Test public void testDefaultExplainTextFormat() { String query = "SELECT 123 FROM dual"; MaterializedResult result = computeActual("EXPLAIN " + query); String actual = Iterables.getOnlyElement(transform(result.getMaterializedTuples(), onlyColumnGetter())); assertEquals(actual, getExplainPlan(query, LOGICAL)); } @Test public void testDefaultExplainGraphvizFormat() { String query = "SELECT 123 FROM dual"; MaterializedResult result = computeActual("EXPLAIN (FORMAT GRAPHVIZ) " + query); String actual = Iterables.getOnlyElement(transform(result.getMaterializedTuples(), onlyColumnGetter())); assertEquals(actual, getGraphvizExplainPlan(query, LOGICAL)); } @Test public void testLogicalExplain() { String query = "SELECT 123 FROM dual"; MaterializedResult result = computeActual("EXPLAIN (TYPE LOGICAL) " + 
query); String actual = Iterables.getOnlyElement(transform(result.getMaterializedTuples(), onlyColumnGetter())); assertEquals(actual, getExplainPlan(query, LOGICAL)); } @Test public void testLogicalExplainTextFormat() { String query = "SELECT 123 FROM dual"; MaterializedResult result = computeActual("EXPLAIN (TYPE LOGICAL, FORMAT TEXT) " + query); String actual = Iterables.getOnlyElement(transform(result.getMaterializedTuples(), onlyColumnGetter())); assertEquals(actual, getExplainPlan(query, LOGICAL)); } @Test public void testLogicalExplainGraphvizFormat() { String query = "SELECT 123 FROM dual"; MaterializedResult result = computeActual("EXPLAIN (TYPE LOGICAL, FORMAT GRAPHVIZ) " + query); String actual = Iterables.getOnlyElement(transform(result.getMaterializedTuples(), onlyColumnGetter())); assertEquals(actual, getGraphvizExplainPlan(query, LOGICAL)); } @Test public void testDistributedExplain() { String query = "SELECT 123 FROM dual"; MaterializedResult result = computeActual("EXPLAIN (TYPE DISTRIBUTED) " + query); String actual = Iterables.getOnlyElement(transform(result.getMaterializedTuples(), onlyColumnGetter())); assertEquals(actual, getExplainPlan(query, DISTRIBUTED)); } @Test public void testDistributedExplainTextFormat() { String query = "SELECT 123 FROM dual"; MaterializedResult result = computeActual("EXPLAIN (TYPE DISTRIBUTED, FORMAT TEXT) " + query); String actual = Iterables.getOnlyElement(transform(result.getMaterializedTuples(), onlyColumnGetter())); assertEquals(actual, getExplainPlan(query, DISTRIBUTED)); } @Test public void testDistributedExplainGraphvizFormat() { String query = "SELECT 123 FROM dual"; MaterializedResult result = computeActual("EXPLAIN (TYPE DISTRIBUTED, FORMAT GRAPHVIZ) " + query); String actual = Iterables.getOnlyElement(transform(result.getMaterializedTuples(), onlyColumnGetter())); assertEquals(actual, getGraphvizExplainPlan(query, DISTRIBUTED)); } @Test public void testShowSchemas() throws Exception { MaterializedResult result = computeActual("SHOW SCHEMAS"); ImmutableSet<String> schemaNames = ImmutableSet.copyOf(transform(result.getMaterializedTuples(), onlyColumnGetter())); assertTrue(schemaNames.containsAll(ImmutableSet.of(TPCH_SCHEMA_NAME, InformationSchemaMetadata.INFORMATION_SCHEMA, "node"))); } @Test public void testShowTables() throws Exception { MaterializedResult result = computeActual("SHOW TABLES"); ImmutableSet<String> tableNames = ImmutableSet.copyOf(transform(result.getMaterializedTuples(), onlyColumnGetter())); assertEquals(tableNames, ImmutableSet.of(TPCH_ORDERS_NAME, TPCH_LINEITEM_NAME)); } @Test public void testShowTablesFrom() throws Exception { MaterializedResult result = computeActual("SHOW TABLES FROM DEFAULT"); ImmutableSet<String> tableNames = ImmutableSet.copyOf(transform(result.getMaterializedTuples(), onlyColumnGetter())); assertEquals(tableNames, ImmutableSet.of(TPCH_ORDERS_NAME, TPCH_LINEITEM_NAME)); result = computeActual("SHOW TABLES FROM TPCH.DEFAULT"); tableNames = ImmutableSet.copyOf(transform(result.getMaterializedTuples(), onlyColumnGetter())); assertEquals(tableNames, ImmutableSet.of(TPCH_ORDERS_NAME, TPCH_LINEITEM_NAME)); result = computeActual("SHOW TABLES FROM UNKNOWN"); tableNames = ImmutableSet.copyOf(transform(result.getMaterializedTuples(), onlyColumnGetter())); assertEquals(tableNames, ImmutableSet.of()); } @Test public void testShowTablesLike() throws Exception { MaterializedResult result = computeActual("SHOW TABLES LIKE 'or%'"); ImmutableSet<String> tableNames = 
ImmutableSet.copyOf(transform(result.getMaterializedTuples(), onlyColumnGetter())); assertEquals(tableNames, ImmutableSet.of(TPCH_ORDERS_NAME)); } @Test public void testShowColumns() throws Exception { MaterializedResult actual = computeActual("SHOW COLUMNS FROM orders"); MaterializedResult expected = resultBuilder(VARIABLE_BINARY, VARIABLE_BINARY, BOOLEAN, BOOLEAN) .row("orderkey", "bigint", true, false) .row("custkey", "bigint", true, false) .row("orderstatus", "varchar", true, false) .row("totalprice", "double", true, false) .row("orderdate", "varchar", true, false) .row("orderpriority", "varchar", true, false) .row("clerk", "varchar", true, false) .row("shippriority", "bigint", true, false) .row("comment", "varchar", true, false) .build(); assertEquals(actual, expected); } @Test public void testShowPartitions() throws Exception { MaterializedResult result = computeActual("SHOW PARTITIONS FROM orders"); // table is not partitioned // TODO: add a partitioned table for tests and test where/order/limit assertEquals(result.getMaterializedTuples().size(), 0); } @Test public void testShowFunctions() throws Exception { MaterializedResult result = computeActual("SHOW FUNCTIONS"); ImmutableMultimap<String, MaterializedTuple> functions = Multimaps.index(result.getMaterializedTuples(), new Function<MaterializedTuple, String>() { @Override public String apply(MaterializedTuple input) { assertEquals(input.getFieldCount(), 5); return (String) input.getField(0); } }); assertTrue(functions.containsKey("avg"), "Expected function names " + functions + " to contain 'avg'"); assertEquals(functions.get("avg").asList().size(), 2); assertEquals(functions.get("avg").asList().get(0).getField(1), "double"); assertEquals(functions.get("avg").asList().get(0).getField(2), "bigint"); assertEquals(functions.get("avg").asList().get(0).getField(3), "aggregate"); assertEquals(functions.get("avg").asList().get(1).getField(1), "double"); assertEquals(functions.get("avg").asList().get(1).getField(2), "double"); assertEquals(functions.get("avg").asList().get(0).getField(3), "aggregate"); assertTrue(functions.containsKey("abs"), "Expected function names " + functions + " to contain 'abs'"); assertEquals(functions.get("abs").asList().get(0).getField(3), "scalar"); assertTrue(functions.containsKey("rand"), "Expected function names " + functions + " to contain 'rand'"); assertEquals(functions.get("rand").asList().get(0).getField(3), "scalar (non-deterministic)"); assertTrue(functions.containsKey("rank"), "Expected function names " + functions + " to contain 'rank'"); assertEquals(functions.get("rank").asList().get(0).getField(3), "window"); assertTrue(functions.containsKey("rank"), "Expected function names " + functions + " to contain 'split_part'"); assertEquals(functions.get("split_part").asList().get(0).getField(1), "varchar"); assertEquals(functions.get("split_part").asList().get(0).getField(2), "varchar, varchar, bigint"); assertEquals(functions.get("split_part").asList().get(0).getField(3), "scalar"); } @Test public void testNoFrom() throws Exception { assertQuery("SELECT 1 + 2, 3 + 4", "SELECT 1 + 2, 3 + 4 FROM orders LIMIT 1"); } @Test public void testTopNByMultipleFields() throws Exception { assertQueryOrdered("SELECT orderkey, custkey, orderstatus FROM orders ORDER BY orderkey ASC, custkey ASC LIMIT 10"); assertQueryOrdered("SELECT orderkey, custkey, orderstatus FROM orders ORDER BY orderkey ASC, custkey DESC LIMIT 10"); assertQueryOrdered("SELECT orderkey, custkey, orderstatus FROM orders ORDER BY orderkey DESC, 
custkey ASC LIMIT 10"); assertQueryOrdered("SELECT orderkey, custkey, orderstatus FROM orders ORDER BY orderkey DESC, custkey DESC LIMIT 10"); // now try with order by fields swapped assertQueryOrdered("SELECT orderkey, custkey, orderstatus FROM orders ORDER BY custkey ASC, orderkey ASC LIMIT 10"); assertQueryOrdered("SELECT orderkey, custkey, orderstatus FROM orders ORDER BY custkey ASC, orderkey DESC LIMIT 10"); assertQueryOrdered("SELECT orderkey, custkey, orderstatus FROM orders ORDER BY custkey DESC, orderkey ASC LIMIT 10"); assertQueryOrdered("SELECT orderkey, custkey, orderstatus FROM orders ORDER BY custkey DESC, orderkey DESC LIMIT 10"); } @Test public void testUnion() throws Exception { assertQuery("SELECT orderkey FROM orders UNION SELECT custkey FROM orders"); } @Test public void testUnionDistinct() throws Exception { assertQuery("SELECT orderkey FROM orders UNION DISTINCT SELECT custkey FROM orders"); } @Test public void testUnionAll() throws Exception { assertQuery("SELECT orderkey FROM orders UNION ALL SELECT custkey FROM orders"); } @Test public void testChainedUnionsWithOrder() throws Exception { assertQueryOrdered("SELECT orderkey FROM orders UNION (SELECT custkey FROM orders UNION SELECT linenumber FROM lineitem) UNION ALL SELECT orderkey FROM lineitem ORDER BY orderkey"); } @Test public void testSubqueryUnion() throws Exception { assertQueryOrdered("SELECT * FROM (SELECT orderkey FROM orders UNION SELECT custkey FROM orders UNION SELECT orderkey FROM orders) ORDER BY orderkey LIMIT 1000"); } @Test public void testSelectOnlyUnion() throws Exception { assertQuery("SELECT 123, 'foo' UNION ALL SELECT 999, 'bar'"); } @Test public void testMultiColumnUnionAll() throws Exception { assertQuery("SELECT * FROM orders UNION ALL SELECT * FROM orders"); } @Test public void testTableQuery() throws Exception { assertQuery("TABLE orders", "SELECT * FROM orders"); } @Test public void testTableQueryOrderLimit() throws Exception { assertQuery("TABLE orders ORDER BY orderkey LIMIT 10", "SELECT * FROM orders ORDER BY orderkey LIMIT 10", true); } @Test public void testTableQueryInUnion() throws Exception { assertQuery("(SELECT * FROM orders ORDER BY orderkey LIMIT 10) UNION ALL TABLE orders", "(SELECT * FROM orders ORDER BY orderkey LIMIT 10) UNION ALL SELECT * FROM orders"); } @Test public void testTableAsSubquery() throws Exception { assertQuery("(TABLE orders) ORDER BY orderkey", "(SELECT * FROM orders) ORDER BY orderkey", true); } @Test public void testLimitPushDown() throws Exception { MaterializedResult actual = computeActual( "(TABLE orders ORDER BY orderkey) UNION ALL " + "SELECT * FROM orders WHERE orderstatus = 'F' UNION ALL " + "(TABLE orders ORDER BY orderkey LIMIT 20) UNION ALL " + "(TABLE orders LIMIT 5) UNION ALL " + "TABLE orders LIMIT 10"); MaterializedResult all = computeExpected("SELECT * FROM ORDERS", actual.getTupleInfo()); assertEquals(actual.getMaterializedTuples().size(), 10); assertTrue(all.getMaterializedTuples().containsAll(actual.getMaterializedTuples())); } @Test public void testOrderLimitCompaction() throws Exception { assertQueryOrdered("SELECT * FROM (SELECT * FROM orders ORDER BY orderkey) LIMIT 10"); } @Test public void testUnaliasSymbolReferencesWithUnion() throws Exception { assertQuery("SELECT 1, 1, 'a', 'a' UNION ALL SELECT 1, 2, 'a', 'b'"); } @Test public void testSemiJoin() throws Exception { // Throw in a bunch of IN subquery predicates assertQuery("" + "SELECT *, o2.custkey\n" + " IN (\n" + " SELECT orderkey\n" + " FROM lineitem\n" + " WHERE orderkey 
% 5 = 0)\n" + "FROM (SELECT * FROM orders WHERE custkey % 256 = 0) o1\n" + "JOIN (SELECT * FROM orders WHERE custkey % 256 = 0) o2\n" + " ON (o1.orderkey IN (SELECT orderkey FROM lineitem WHERE orderkey % 4 = 0)) = (o2.orderkey IN (SELECT orderkey FROM lineitem WHERE orderkey % 4 = 0))\n" + "WHERE o1.orderkey\n" + " IN (\n" + " SELECT orderkey\n" + " FROM lineitem\n" + " WHERE orderkey % 4 = 0)\n" + "ORDER BY o1.orderkey\n" + " IN (\n" + " SELECT orderkey\n" + " FROM lineitem\n" + " WHERE orderkey % 7 = 0)"); assertQuery("" + "SELECT orderkey\n" + " IN (\n" + " SELECT orderkey\n" + " FROM lineitem\n" + " WHERE partkey % 4 = 0),\n" + " SUM(\n" + " CASE\n" + " WHEN orderkey\n" + " IN (\n" + " SELECT orderkey\n" + " FROM lineitem\n" + " WHERE suppkey % 4 = 0)\n" + " THEN 1\n" + " ELSE 0\n" + " END)\n" + "FROM orders\n" + "GROUP BY orderkey\n" + " IN (\n" + " SELECT orderkey\n" + " FROM lineitem\n" + " WHERE partkey % 4 = 0)\n" + "HAVING SUM(\n" + " CASE\n" + " WHEN orderkey\n" + " IN (\n" + " SELECT orderkey\n" + " FROM lineitem\n" + " WHERE suppkey % 4 = 0)\n" + " THEN 1\n" + " ELSE 0\n" + " END) > 1"); } @Test public void testAntiJoin() throws Exception { assertQuery("" + "SELECT *, orderkey\n" + " NOT IN (\n" + " SELECT orderkey\n" + " FROM lineitem\n" + " WHERE orderkey % 3 = 0)\n" + "FROM orders"); } @Test public void testSemiJoinLimitPushDown() throws Exception { assertQuery("" + "SELECT COUNT(*)\n" + "FROM (\n" + " SELECT orderkey\n" + " IN (\n" + " SELECT orderkey\n" + " FROM lineitem\n" + " WHERE orderkey % 2 = 0)\n" + " FROM orders\n" + " LIMIT 10)"); } @Test public void testSemiJoinNullHandling() throws Exception { assertQuery("" + "SELECT orderkey\n" + " IN (\n" + " SELECT CASE WHEN orderkey % 3 = 0 THEN NULL ELSE orderkey END\n" + " FROM lineitem)\n" + "FROM orders"); assertQuery("" + "SELECT orderkey\n" + " IN (\n" + " SELECT orderkey\n" + " FROM lineitem)\n" + "FROM (\n" + " SELECT CASE WHEN orderkey % 3 = 0 THEN NULL ELSE orderkey END AS orderkey\n" + " FROM orders)"); assertQuery("" + "SELECT orderkey\n" + " IN (\n" + " SELECT CASE WHEN orderkey % 3 = 0 THEN NULL ELSE orderkey END\n" + " FROM lineitem)\n" + "FROM (\n" + " SELECT CASE WHEN orderkey % 4 = 0 THEN NULL ELSE orderkey END AS orderkey\n" + " FROM orders)"); } @Test public void testPredicatePushdown() throws Exception { assertQuery("" + "SELECT *\n" + "FROM (\n" + " SELECT orderkey+1 as a FROM orders WHERE orderstatus = 'F' UNION ALL \n" + " SELECT orderkey FROM orders WHERE orderkey % 2 = 0 UNION ALL \n" + " (SELECT orderkey+custkey FROM orders ORDER BY orderkey LIMIT 10)\n" + ") \n" + "WHERE a < 20 OR a > 100 \n" + "ORDER BY a"); } @Test public void testJoinPredicatePushdown() throws Exception { assertQuery("" + "SELECT COUNT(*)\n" + "FROM lineitem \n" + "JOIN (\n" + " SELECT * FROM orders\n" + ") orders \n" + "ON lineitem.orderkey = orders.orderkey \n" + "WHERE orders.orderkey % 4 = 0\n" + " AND lineitem.suppkey > orders.orderkey"); } @Test public void testLeftJoinAsInnerPredicatePushdown() throws Exception { assertQuery("" + "SELECT COUNT(*)\n" + "FROM lineitem \n" + "LEFT JOIN (\n" + " SELECT * FROM orders WHERE orders.orderkey % 2 = 0\n" + ") orders \n" + "ON lineitem.orderkey = orders.orderkey \n" + "WHERE orders.orderkey % 4 = 0\n" + " AND (lineitem.suppkey % 2 = orders.orderkey % 2 OR orders.custkey IS NULL)"); } @Test public void testPlainLeftJoinPredicatePushdown() throws Exception { assertQuery("" + "SELECT COUNT(*)\n" + "FROM lineitem \n" + "LEFT JOIN (\n" + " SELECT * FROM orders WHERE orders.orderkey % 
2 = 0\n" + ") orders \n" + "ON lineitem.orderkey = orders.orderkey \n" + "WHERE lineitem.orderkey % 4 = 0\n" + " AND (lineitem.suppkey % 2 = orders.orderkey % 2 OR orders.orderkey IS NULL)"); } @Test public void testLeftJoinPredicatePushdownWithSelfEquality() throws Exception { assertQuery("" + "SELECT COUNT(*)\n" + "FROM lineitem \n" + "LEFT JOIN (\n" + " SELECT * FROM orders WHERE orders.orderkey % 2 = 0\n" + ") orders \n" + "ON lineitem.orderkey = orders.orderkey \n" + "WHERE orders.orderkey = orders.orderkey\n" + " AND lineitem.orderkey % 4 = 0\n" + " AND (lineitem.suppkey % 2 = orders.orderkey % 2 OR orders.orderkey IS NULL)"); } @Test public void testRightJoinAsInnerPredicatePushdown() throws Exception { assertQuery("" + "SELECT COUNT(*)\n" + "FROM (\n" + " SELECT * FROM orders WHERE orders.orderkey % 2 = 0\n" + ") orders\n" + "RIGHT JOIN lineitem\n" + "ON lineitem.orderkey = orders.orderkey \n" + "WHERE orders.orderkey % 4 = 0\n" + " AND (lineitem.suppkey % 2 = orders.orderkey % 2 OR orders.custkey IS NULL)"); } @Test public void testPlainRightJoinPredicatePushdown() throws Exception { assertQuery("" + "SELECT COUNT(*)\n" + "FROM (\n" + " SELECT * FROM orders WHERE orders.orderkey % 2 = 0\n" + ") orders \n" + "RIGHT JOIN lineitem\n" + "ON lineitem.orderkey = orders.orderkey \n" + "WHERE lineitem.orderkey % 4 = 0\n" + " AND (lineitem.suppkey % 2 = orders.orderkey % 2 OR orders.orderkey IS NULL)"); } @Test public void testRightJoinPredicatePushdownWithSelfEquality() throws Exception { assertQuery("" + "SELECT COUNT(*)\n" + "FROM (\n" + " SELECT * FROM orders WHERE orders.orderkey % 2 = 0\n" + ") orders \n" + "RIGHT JOIN lineitem\n" + "ON lineitem.orderkey = orders.orderkey \n" + "WHERE orders.orderkey = orders.orderkey\n" + " AND lineitem.orderkey % 4 = 0\n" + " AND (lineitem.suppkey % 2 = orders.orderkey % 2 OR orders.orderkey IS NULL)"); } @Test public void testPredicatePushdownJoinEqualityGroups() throws Exception { assertQuery("" + "SELECT *\n" + "FROM (\n" + " SELECT custkey custkey1, custkey%4 custkey1a, custkey%8 custkey1b, custkey%16 custkey1c\n" + " FROM orders\n" + ") orders1 \n" + "JOIN (\n" + " SELECT custkey custkey2, custkey%4 custkey2a, custkey%8 custkey2b\n" + " FROM orders\n" + ") orders2 ON orders1.custkey1 = orders2.custkey2\n" + "WHERE custkey2a = custkey2b\n" + " AND custkey1 = custkey1a\n" + " AND custkey2 = custkey2a\n" + " AND custkey1a = custkey1c\n" + " AND custkey1b = custkey1c\n" + " AND custkey1b % 2 = 0"); } @Test public void testGroupByKeyPredicatePushdown() throws Exception { assertQuery("" + "SELECT *\n" + "FROM (\n" + " SELECT custkey1, orderstatus1, SUM(totalprice1) totalprice, MAX(custkey2) maxcustkey\n" + " FROM (\n" + " SELECT *\n" + " FROM (\n" + " SELECT custkey custkey1, orderstatus orderstatus1, CAST(totalprice AS BIGINT) totalprice1, orderkey orderkey1\n" + " FROM orders\n" + " ) orders1 \n" + " JOIN (\n" + " SELECT custkey custkey2, orderstatus orderstatus2, CAST(totalprice AS BIGINT) totalprice2, orderkey orderkey2\n" + " FROM orders\n" + " ) orders2 ON orders1.orderkey1 = orders2.orderkey2\n" + " ) \n" + " GROUP BY custkey1, orderstatus1\n" + ")\n" + "WHERE custkey1 = maxcustkey\n" + "AND maxcustkey % 2 = 0 \n" + "AND orderstatus1 = 'F'\n" + "AND totalprice > 10000\n" + "ORDER BY custkey1, orderstatus1, totalprice, maxcustkey"); } @Test public void testNonDeterministicJoinPredicatePushdown() throws Exception { MaterializedResult materializedResult = computeActual("" + "SELECT COUNT(*)\n" + "FROM (\n" + " SELECT DISTINCT *\n" + " FROM (\n" 
+ " SELECT 'abc' as col1a, 500 as col1b FROM lineitem limit 1\n" + " ) table1\n" + " JOIN (\n" + " SELECT 'abc' as col2a FROM lineitem limit 1000000\n" + " ) table2\n" + " ON table1.col1a = table2.col2a\n" + " WHERE rand() * 1000 > table1.col1b\n" + ")"); MaterializedTuple tuple = Iterables.getOnlyElement(materializedResult.getMaterializedTuples()); Assert.assertEquals(tuple.getFieldCount(), 1); long count = (Long) tuple.getField(0); // Technically non-deterministic unit test but has essentially a next to impossible chance of a false positive Assert.assertTrue(count > 0 && count < 1000000); } @Test public void testTrivialNonDeterministicPredicatePushdown() throws Exception { assertQuery("SELECT COUNT(*) FROM dual WHERE rand() >= 0"); } @Test public void testNonDeterministicTableScanPredicatePushdown() throws Exception { MaterializedResult materializedResult = computeActual("" + "SELECT COUNT(*)\n" + "FROM (\n" + " SELECT *\n" + " FROM lineitem\n" + " LIMIT 1000\n" + ")\n" + "WHERE rand() > 0.5"); MaterializedTuple tuple = Iterables.getOnlyElement(materializedResult.getMaterializedTuples()); Assert.assertEquals(tuple.getFieldCount(), 1); long count = (Long) tuple.getField(0); // Technically non-deterministic unit test but has essentially a next to impossible chance of a false positive Assert.assertTrue(count > 0 && count < 1000); } @Test public void testNonDeterministicAggregationPredicatePushdown() throws Exception { MaterializedResult materializedResult = computeActual("" + "SELECT COUNT(*)\n" + "FROM (\n" + " SELECT orderkey, COUNT(*)\n" + " FROM lineitem\n" + " GROUP BY orderkey\n" + " LIMIT 1000\n" + ")\n" + "WHERE rand() > 0.5"); MaterializedTuple tuple = Iterables.getOnlyElement(materializedResult.getMaterializedTuples()); Assert.assertEquals(tuple.getFieldCount(), 1); long count = (Long) tuple.getField(0); // Technically non-deterministic unit test but has essentially a next to impossible chance of a false positive Assert.assertTrue(count > 0 && count < 1000); } @Test public void testSemiJoinPredicateMoveAround() throws Exception { assertQuery("" + "SELECT COUNT(*)\n" + "FROM (SELECT * FROM orders WHERE custkey % 2 = 0 AND orderkey % 3 = 0)\n" + "WHERE orderkey\n" + " IN (\n" + " SELECT CASE WHEN orderkey % 7 = 0 THEN NULL ELSE orderkey END\n" + " FROM lineitem\n" + " WHERE partkey % 2 = 0)\n" + " AND\n" + " orderkey % 2 = 0"); } @Test(expectedExceptions = RuntimeException.class, expectedExceptionsMessageRegExp = "\\QUnexpected parameters (bigint) for function length. 
Expected: length(varchar)\\E") public void testFunctionNotRegistered() { computeActual("SELECT length(1)"); } @Test(expectedExceptions = RuntimeException.class, expectedExceptionsMessageRegExp = "Types are not comparable with '<>': bigint vs varchar") public void testTypeMismatch() { computeActual("SELECT 1 <> 'x'"); } @BeforeClass(alwaysRun = true) public void setupDatabase() throws Exception { Logging.initialize(); handle = DBI.open("jdbc:h2:mem:test" + System.nanoTime()); RecordSet ordersRecords = readTpchRecords(TPCH_ORDERS_METADATA); handle.execute("CREATE TABLE orders (\n" + " orderkey BIGINT PRIMARY KEY,\n" + " custkey BIGINT NOT NULL,\n" + " orderstatus CHAR(1) NOT NULL,\n" + " totalprice DOUBLE NOT NULL,\n" + " orderdate CHAR(10) NOT NULL,\n" + " orderpriority CHAR(15) NOT NULL,\n" + " clerk CHAR(15) NOT NULL,\n" + " shippriority BIGINT NOT NULL,\n" + " comment VARCHAR(79) NOT NULL\n" + ")"); insertRows(TPCH_ORDERS_METADATA, handle, ordersRecords); RecordSet lineItemRecords = readTpchRecords(TPCH_LINEITEM_METADATA); handle.execute("CREATE TABLE lineitem (\n" + " orderkey BIGINT,\n" + " partkey BIGINT NOT NULL,\n" + " suppkey BIGINT NOT NULL,\n" + " linenumber BIGINT,\n" + " quantity BIGINT NOT NULL,\n" + " extendedprice DOUBLE NOT NULL,\n" + " discount DOUBLE NOT NULL,\n" + " tax DOUBLE NOT NULL,\n" + " returnflag CHAR(1) NOT NULL,\n" + " linestatus CHAR(1) NOT NULL,\n" + " shipdate CHAR(10) NOT NULL,\n" + " commitdate CHAR(10) NOT NULL,\n" + " receiptdate CHAR(10) NOT NULL,\n" + " shipinstruct VARCHAR(25) NOT NULL,\n" + " shipmode VARCHAR(10) NOT NULL,\n" + " comment VARCHAR(44) NOT NULL,\n" + " PRIMARY KEY (orderkey, linenumber)" + ")"); insertRows(TPCH_LINEITEM_METADATA, handle, lineItemRecords); setUpQueryFramework(TpchMetadata.TPCH_CATALOG_NAME, TpchMetadata.TPCH_SCHEMA_NAME); } @AfterClass(alwaysRun = true) public void cleanupDatabase() throws Exception { tearDownQueryFramework(); handle.close(); } protected abstract int getNodeCount(); protected abstract void setUpQueryFramework(String catalog, String schema) throws Exception; protected void tearDownQueryFramework() throws Exception { } protected abstract MaterializedResult computeActual(@Language("SQL") String sql); protected void assertQuery(@Language("SQL") String sql) throws Exception { assertQuery(sql, sql, false); } private void assertQueryOrdered(@Language("SQL") String sql) throws Exception { assertQuery(sql, sql, true); } protected void assertQuery(@Language("SQL") String actual, @Language("SQL") String expected) throws Exception { assertQuery(actual, expected, false); } private static final Logger log = Logger.get(AbstractTestQueries.class); private void assertQuery(@Language("SQL") String actual, @Language("SQL") String expected, boolean ensureOrdering) throws Exception { long start = System.nanoTime(); MaterializedResult actualResults = computeActual(actual); log.info("FINISHED in %s", Duration.nanosSince(start)); MaterializedResult expectedResults = computeExpected(expected, actualResults.getTupleInfo()); if (ensureOrdering) { assertEquals(actualResults.getMaterializedTuples(), expectedResults.getMaterializedTuples()); } else { assertEqualsIgnoreOrder(actualResults.getMaterializedTuples(), expectedResults.getMaterializedTuples()); } } public static void assertEqualsIgnoreOrder(Iterable<?> actual, Iterable<?> expected) { assertNotNull(actual, "actual is null"); assertNotNull(expected, "expected is null"); ImmutableMultiset<?> actualSet = ImmutableMultiset.copyOf(actual); ImmutableMultiset<?> expectedSet = 
ImmutableMultiset.copyOf(expected); if (!actualSet.equals(expectedSet)) { fail(format("not equal\nActual %s rows:\n %s\nExpected %s rows:\n %s\n", actualSet.size(), Joiner.on("\n ").join(Iterables.limit(actualSet, 100)), expectedSet.size(), Joiner.on("\n ").join(Iterables.limit(expectedSet, 100)))); } } private MaterializedResult computeExpected(@Language("SQL") final String sql, TupleInfo resultTupleInfo) { return new MaterializedResult( handle.createQuery(sql) .map(tupleMapper(resultTupleInfo)) .list(), resultTupleInfo ); } private static ResultSetMapper<Tuple> tupleMapper(final TupleInfo tupleInfo) { return new ResultSetMapper<Tuple>() { @Override public Tuple map(int index, ResultSet resultSet, StatementContext ctx) throws SQLException { List<TupleInfo.Type> types = tupleInfo.getTypes(); int count = resultSet.getMetaData().getColumnCount(); checkArgument(types.size() == count, "tuple info does not match result"); TupleInfo.Builder builder = tupleInfo.builder(); for (int i = 1; i <= count; i++) { TupleInfo.Type type = types.get(i - 1); switch (type) { case BOOLEAN: boolean booleanValue = resultSet.getBoolean(i); if (resultSet.wasNull()) { builder.appendNull(); } else { builder.append(booleanValue); } break; case FIXED_INT_64: long longValue = resultSet.getLong(i); if (resultSet.wasNull()) { builder.appendNull(); } else { builder.append(longValue); } break; case DOUBLE: double doubleValue = resultSet.getDouble(i); if (resultSet.wasNull()) { builder.appendNull(); } else { builder.append(doubleValue); } break; case VARIABLE_BINARY: String value = resultSet.getString(i); if (resultSet.wasNull()) { builder.appendNull(); } else { builder.append(Slices.wrappedBuffer(value.getBytes(UTF_8))); } break; default: throw new AssertionError("unhandled type: " + type); } } return builder.build(); } }; } private static void insertRows(TableMetadata tableMetadata, Handle handle, RecordSet data) { String vars = Joiner.on(',').join(nCopies(tableMetadata.getColumns().size(), "?")); String sql = format("INSERT INTO %s VALUES (%s)", tableMetadata.getTable().getTableName(), vars); RecordCursor cursor = data.cursor(); while (true) { // insert 1000 rows at a time PreparedBatch batch = handle.prepareBatch(sql); for (int row = 0; row < 1000; row++) { if (!cursor.advanceNextPosition()) { batch.execute(); return; } PreparedBatchPart part = batch.add(); for (int column = 0; column < tableMetadata.getColumns().size(); column++) { ColumnMetadata columnMetadata = tableMetadata.getColumns().get(column); switch (columnMetadata.getType()) { case BOOLEAN: part.bind(column, cursor.getBoolean(column)); break; case LONG: part.bind(column, cursor.getLong(column)); break; case DOUBLE: part.bind(column, cursor.getDouble(column)); break; case STRING: part.bind(column, new String(cursor.getString(column), UTF_8)); break; } } } batch.execute(); } } private Function<MaterializedTuple, String> onlyColumnGetter() { return new Function<MaterializedTuple, String>() { @Override public String apply(MaterializedTuple input) { assertEquals(input.getFieldCount(), 1); return (String) input.getField(0); } }; } private static String getExplainPlan(String query, ExplainType.Type planType) { QueryExplainer explainer = getQueryExplainer(); return explainer.getPlan((Query) SqlParser.createStatement(query), planType); } private static String getGraphvizExplainPlan(String query, ExplainType.Type planType) { QueryExplainer explainer = getQueryExplainer(); return explainer.getGraphvizPlan((Query) SqlParser.createStatement(query), planType); } private 
static QueryExplainer getQueryExplainer() { Session session = new Session("user", "test", DEFAULT_CATALOG, DEFAULT_SCHEMA, null, null); MetadataManager metadata = new MetadataManager(); metadata.addInternalSchemaMetadata(new DualMetadata()); List<PlanOptimizer> optimizers = new PlanOptimizersFactory(metadata).get(); return new QueryExplainer(session, optimizers, metadata, new MockPeriodicImportManager(), new MockStorageManager()); } }
presto-main/src/test/java/com/facebook/presto/AbstractTestQueries.java
package com.facebook.presto; import com.facebook.presto.connector.dual.DualMetadata; import com.facebook.presto.connector.informationSchema.InformationSchemaMetadata; import com.facebook.presto.importer.MockPeriodicImportManager; import com.facebook.presto.metadata.MetadataManager; import com.facebook.presto.spi.ColumnMetadata; import com.facebook.presto.spi.RecordCursor; import com.facebook.presto.spi.RecordSet; import com.facebook.presto.spi.TableMetadata; import com.facebook.presto.sql.analyzer.QueryExplainer; import com.facebook.presto.sql.analyzer.Session; import com.facebook.presto.sql.parser.SqlParser; import com.facebook.presto.sql.planner.PlanOptimizersFactory; import com.facebook.presto.sql.planner.optimizations.PlanOptimizer; import com.facebook.presto.sql.tree.ExplainType; import com.facebook.presto.sql.tree.Query; import com.facebook.presto.storage.MockStorageManager; import com.facebook.presto.tpch.TpchMetadata; import com.facebook.presto.tuple.Tuple; import com.facebook.presto.tuple.TupleInfo; import com.facebook.presto.util.MaterializedResult; import com.facebook.presto.util.MaterializedTuple; import com.google.common.base.Function; import com.google.common.base.Joiner; import com.google.common.collect.ArrayListMultimap; import com.google.common.collect.ImmutableMultimap; import com.google.common.collect.ImmutableMultiset; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; import com.google.common.collect.Multimap; import com.google.common.collect.Multimaps; import com.google.common.collect.Ordering; import io.airlift.log.Logger; import io.airlift.log.Logging; import io.airlift.slice.Slices; import io.airlift.units.Duration; import org.intellij.lang.annotations.Language; import org.skife.jdbi.v2.DBI; import org.skife.jdbi.v2.Handle; import org.skife.jdbi.v2.PreparedBatch; import org.skife.jdbi.v2.PreparedBatchPart; import org.skife.jdbi.v2.StatementContext; import org.skife.jdbi.v2.tweak.ResultSetMapper; import org.testng.Assert; import org.testng.annotations.AfterClass; import org.testng.annotations.BeforeClass; import org.testng.annotations.Test; import java.sql.ResultSet; import java.sql.SQLException; import java.util.List; import static com.facebook.presto.sql.analyzer.Session.DEFAULT_CATALOG; import static com.facebook.presto.sql.analyzer.Session.DEFAULT_SCHEMA; import static com.facebook.presto.sql.tree.ExplainType.Type.DISTRIBUTED; import static com.facebook.presto.sql.tree.ExplainType.Type.LOGICAL; import static com.facebook.presto.tpch.TpchMetadata.TPCH_LINEITEM_METADATA; import static com.facebook.presto.tpch.TpchMetadata.TPCH_LINEITEM_NAME; import static com.facebook.presto.tpch.TpchMetadata.TPCH_ORDERS_METADATA; import static com.facebook.presto.tpch.TpchMetadata.TPCH_ORDERS_NAME; import static com.facebook.presto.tpch.TpchMetadata.TPCH_SCHEMA_NAME; import static com.facebook.presto.tuple.TupleInfo.Type.BOOLEAN; import static com.facebook.presto.tuple.TupleInfo.Type.DOUBLE; import static com.facebook.presto.tuple.TupleInfo.Type.FIXED_INT_64; import static com.facebook.presto.tuple.TupleInfo.Type.VARIABLE_BINARY; import static com.facebook.presto.util.InMemoryTpchBlocksProvider.readTpchRecords; import static com.facebook.presto.util.MaterializedResult.resultBuilder; import static com.google.common.base.Charsets.UTF_8; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.collect.Iterables.transform; import static java.lang.String.format; import static 
java.util.Collections.nCopies; import static org.testng.Assert.assertEquals; import static org.testng.Assert.assertNotNull; import static org.testng.Assert.assertTrue; import static org.testng.Assert.fail; public abstract class AbstractTestQueries { private Handle handle; @Test public void testMaxMinStringWithNulls() throws Exception { assertQuery("SELECT custkey, MAX(NULLIF(orderstatus, 'O')), MIN(NULLIF(orderstatus, 'O')) FROM orders GROUP BY custkey"); } @Test public void testApproxPercentile() throws Exception { MaterializedResult raw = computeActual("SELECT orderstatus, orderkey, totalprice FROM ORDERS"); Multimap<String, Long> orderKeyByStatus = ArrayListMultimap.create(); Multimap<String, Double> totalPriceByStatus = ArrayListMultimap.create(); for (MaterializedTuple tuple : raw.getMaterializedTuples()) { orderKeyByStatus.put((String) tuple.getField(0), (Long) tuple.getField(1)); totalPriceByStatus.put((String) tuple.getField(0), (Double) tuple.getField(2)); } MaterializedResult actual = computeActual("" + "SELECT orderstatus, " + " approx_percentile(orderkey, 0.5), " + " approx_percentile(totalprice, 0.5)," + " approx_percentile(orderkey, 2, 0.5)," + " approx_percentile(totalprice, 2, 0.5)\n" + "FROM ORDERS\n" + "GROUP BY orderstatus"); for (MaterializedTuple tuple : actual.getMaterializedTuples()) { String status = (String) tuple.getField(0); Long orderKey = (Long) tuple.getField(1); Double totalPrice = (Double) tuple.getField(2); Long orderKeyWeighted = (Long) tuple.getField(3); Double totalPriceWeighted = (Double) tuple.getField(4); List<Long> orderKeys = Ordering.natural().sortedCopy(orderKeyByStatus.get(status)); List<Double> totalPrices = Ordering.natural().sortedCopy(totalPriceByStatus.get(status)); // verify real rank of returned value is within 1% of requested rank assertTrue(orderKey >= orderKeys.get((int) (0.49 * orderKeys.size()))); assertTrue(orderKey <= orderKeys.get((int) (0.51 * orderKeys.size()))); assertTrue(orderKeyWeighted >= orderKeys.get((int) (0.49 * orderKeys.size()))); assertTrue(orderKeyWeighted <= orderKeys.get((int) (0.51 * orderKeys.size()))); assertTrue(totalPrice >= totalPrices.get((int) (0.49 * totalPrices.size()))); assertTrue(totalPrice <= totalPrices.get((int) (0.51 * totalPrices.size()))); assertTrue(totalPriceWeighted >= totalPrices.get((int) (0.49 * totalPrices.size()))); assertTrue(totalPriceWeighted <= totalPrices.get((int) (0.51 * totalPrices.size()))); } } @Test public void testComplexQuery() throws Exception { MaterializedResult actual = computeActual("SELECT sum(orderkey), row_number() OVER (ORDER BY orderkey)\n" + "FROM orders\n" + "WHERE orderkey <= 10\n" + "GROUP BY orderkey\n" + "HAVING sum(orderkey) >= 3\n" + "ORDER BY orderkey DESC\n" + "LIMIT 3"); MaterializedResult expected = resultBuilder(FIXED_INT_64, FIXED_INT_64) .row(7, 5) .row(6, 4) .row(5, 3) .build(); assertEquals(actual, expected); } @Test public void testWhereNull() throws Exception { // This query is has this strange shape to force the compiler to leave a true on the stack // with the null flag set so if the filter method is not handling nulls correctly, this // query will fail assertQuery("SELECT custkey FROM orders WHERE custkey = custkey AND cast(nullif(custkey, custkey) as boolean) AND cast(nullif(custkey, custkey) as boolean)"); } @Test public void testSumOfNulls() throws Exception { assertQuery("SELECT orderstatus, sum(CAST(NULL AS BIGINT)) FROM orders GROUP BY orderstatus"); } @Test public void testApproximateCountDistinct() throws Exception { MaterializedResult 
actual = computeActual("SELECT approx_distinct(custkey) FROM orders"); MaterializedResult expected = resultBuilder(FIXED_INT_64) .row(971) .build(); assertEqualsIgnoreOrder(actual.getMaterializedTuples(), expected.getMaterializedTuples()); } @Test public void testApproximateCountDistinctGroupBy() throws Exception { MaterializedResult actual = computeActual("SELECT orderstatus, approx_distinct(custkey) FROM orders GROUP BY orderstatus"); MaterializedResult expected = resultBuilder(actual.getTupleInfo()) .row("O", 969) .row("F", 964) .row("P", 301) .build(); assertEqualsIgnoreOrder(actual.getMaterializedTuples(), expected.getMaterializedTuples()); } @Test public void testCountBoolean() throws Exception { assertQuery("SELECT COUNT(true) FROM orders"); } @Test public void testJoinWithMultiFieldGroupBy() throws Exception { assertQuery("SELECT orderstatus FROM lineitem JOIN (SELECT DISTINCT orderkey, orderstatus FROM ORDERS) T on lineitem.orderkey = T.orderkey"); } @Test public void testGroupByRepeatedField() throws Exception { assertQuery("SELECT sum(custkey) FROM orders GROUP BY orderstatus, orderstatus"); } @Test public void testGroupByRepeatedField2() throws Exception { assertQuery("SELECT count(*) FROM (select orderstatus a, orderstatus b FROM orders) GROUP BY a, b"); } @Test public void testGroupByMultipleFieldsWithPredicateOnAggregationArgument() throws Exception { assertQuery("SELECT custkey, orderstatus, MAX(orderkey) FROM ORDERS WHERE orderkey = 1 GROUP BY custkey, orderstatus"); } @Test public void testReorderOutputsOfGroupByAggregation() throws Exception { assertQuery( "SELECT orderstatus, a, custkey, b FROM (SELECT custkey, orderstatus, -COUNT(*) a, MAX(orderkey) b FROM ORDERS WHERE orderkey = 1 GROUP BY custkey, orderstatus) T"); } @Test public void testGroupAggregationOverNestedGroupByAggregation() throws Exception { assertQuery("SELECT sum(custkey), max(orderstatus), min(c) FROM (SELECT orderstatus, custkey, COUNT(*) c FROM ORDERS GROUP BY orderstatus, custkey) T"); } @Test public void testDistinctMultipleFields() throws Exception { assertQuery("SELECT DISTINCT custkey, orderstatus FROM ORDERS"); } @Test public void testArithmeticNegation() throws Exception { assertQuery("SELECT -custkey FROM orders"); } @Test public void testDistinct() throws Exception { assertQuery("SELECT DISTINCT custkey FROM orders"); } // TODO: we need to properly propagate exceptions with their actual classes @Test(expectedExceptions = Exception.class, expectedExceptionsMessageRegExp = "DISTINCT in aggregation parameters not yet supported") public void testCountDistinct() throws Exception { assertQuery("SELECT COUNT(DISTINCT custkey) FROM orders"); } @Test public void testDistinctWithOrderBy() throws Exception { assertQueryOrdered("SELECT DISTINCT custkey FROM orders ORDER BY custkey LIMIT 10"); } @Test(expectedExceptions = Exception.class, expectedExceptionsMessageRegExp = "For SELECT DISTINCT, ORDER BY expressions must appear in select list") public void testDistinctWithOrderByNotInSelect() throws Exception { assertQueryOrdered("SELECT DISTINCT custkey FROM orders ORDER BY orderkey LIMIT 10"); } @Test public void testOrderByLimit() throws Exception { assertQueryOrdered("SELECT custkey, orderstatus FROM ORDERS ORDER BY orderkey DESC LIMIT 10"); } @Test public void testOrderByExpressionWithLimit() throws Exception { assertQueryOrdered("SELECT custkey, orderstatus FROM ORDERS ORDER BY orderkey + 1 DESC LIMIT 10"); } @Test public void testGroupByOrderByLimit() throws Exception { assertQueryOrdered("SELECT 
custkey, SUM(totalprice) FROM ORDERS GROUP BY custkey ORDER BY SUM(totalprice) DESC LIMIT 10"); } @Test public void testLimitZero() throws Exception { assertQuery("SELECT custkey, totalprice FROM orders LIMIT 0"); } @Test public void testRepeatedAggregations() throws Exception { assertQuery("SELECT SUM(orderkey), SUM(orderkey) FROM ORDERS"); } @Test public void testRepeatedOutputs() throws Exception { assertQuery("SELECT orderkey a, orderkey b FROM ORDERS WHERE orderstatus = 'F'"); } @Test public void testLimit() throws Exception { MaterializedResult actual = computeActual("SELECT orderkey FROM ORDERS LIMIT 10"); MaterializedResult all = computeExpected("SELECT orderkey FROM ORDERS", actual.getTupleInfo()); assertEquals(actual.getMaterializedTuples().size(), 10); assertTrue(all.getMaterializedTuples().containsAll(actual.getMaterializedTuples())); } @Test public void testAggregationWithLimit() throws Exception { MaterializedResult actual = computeActual("SELECT custkey, SUM(totalprice) FROM ORDERS GROUP BY custkey LIMIT 10"); MaterializedResult all = computeExpected("SELECT custkey, SUM(totalprice) FROM ORDERS GROUP BY custkey", actual.getTupleInfo()); assertEquals(actual.getMaterializedTuples().size(), 10); assertTrue(all.getMaterializedTuples().containsAll(actual.getMaterializedTuples())); } @Test public void testLimitInInlineView() throws Exception { MaterializedResult actual = computeActual("SELECT orderkey FROM (SELECT orderkey FROM ORDERS LIMIT 100) T LIMIT 10"); MaterializedResult all = computeExpected("SELECT orderkey FROM ORDERS", actual.getTupleInfo()); assertEquals(actual.getMaterializedTuples().size(), 10); assertTrue(all.getMaterializedTuples().containsAll(actual.getMaterializedTuples())); } @Test public void testCountAll() throws Exception { assertQuery("SELECT COUNT(*) FROM ORDERS"); } @Test public void testCountColumn() throws Exception { assertQuery("SELECT COUNT(orderkey) FROM ORDERS"); assertQuery("SELECT COUNT(orderstatus) FROM ORDERS"); assertQuery("SELECT COUNT(orderdate) FROM ORDERS"); assertQuery("SELECT COUNT(1) FROM ORDERS"); assertQuery("SELECT COUNT(NULLIF(orderstatus, 'F')) FROM ORDERS"); assertQuery("SELECT COUNT(CAST(NULL AS BIGINT)) FROM ORDERS"); // todo: make COUNT(null) work } @Test public void testWildcard() throws Exception { assertQuery("SELECT * FROM ORDERS"); } @Test public void testMultipleWildcards() throws Exception { assertQuery("SELECT *, 123, * FROM ORDERS"); } @Test public void testMixedWildcards() throws Exception { assertQuery("SELECT *, orders.*, orderkey FROM orders"); } @Test public void testQualifiedWildcardFromAlias() throws Exception { assertQuery("SELECT T.* FROM ORDERS T"); } @Test public void testQualifiedWildcardFromInlineView() throws Exception { assertQuery("SELECT T.* FROM (SELECT orderkey + custkey FROM ORDERS) T"); } @Test public void testQualifiedWildcard() throws Exception { assertQuery("SELECT ORDERS.* FROM ORDERS"); } @Test public void testAverageAll() throws Exception { assertQuery("SELECT AVG(totalprice) FROM ORDERS"); } @Test public void testVariance() throws Exception { // int64 assertQuery("SELECT VAR_SAMP(custkey) FROM ORDERS"); assertQuery("SELECT VAR_SAMP(custkey) FROM (SELECT custkey FROM ORDERS ORDER BY custkey LIMIT 2) T"); assertQuery("SELECT VAR_SAMP(custkey) FROM (SELECT custkey FROM ORDERS ORDER BY custkey LIMIT 1) T"); assertQuery("SELECT VAR_SAMP(custkey) FROM (SELECT custkey FROM ORDERS LIMIT 0) T"); // double assertQuery("SELECT VAR_SAMP(totalprice) FROM ORDERS"); assertQuery("SELECT VAR_SAMP(totalprice) 
FROM (SELECT totalprice FROM ORDERS ORDER BY totalprice LIMIT 2) T"); assertQuery("SELECT VAR_SAMP(totalprice) FROM (SELECT totalprice FROM ORDERS ORDER BY totalprice LIMIT 1) T"); assertQuery("SELECT VAR_SAMP(totalprice) FROM (SELECT totalprice FROM ORDERS LIMIT 0) T"); } @Test public void testVariancePop() throws Exception { // int64 assertQuery("SELECT VAR_POP(custkey) FROM ORDERS"); assertQuery("SELECT VAR_POP(custkey) FROM (SELECT custkey FROM ORDERS ORDER BY custkey LIMIT 2) T"); assertQuery("SELECT VAR_POP(custkey) FROM (SELECT custkey FROM ORDERS ORDER BY custkey LIMIT 1) T"); assertQuery("SELECT VAR_POP(custkey) FROM (SELECT custkey FROM ORDERS LIMIT 0) T"); // double assertQuery("SELECT VAR_POP(totalprice) FROM ORDERS"); assertQuery("SELECT VAR_POP(totalprice) FROM (SELECT totalprice FROM ORDERS ORDER BY totalprice LIMIT 2) T"); assertQuery("SELECT VAR_POP(totalprice) FROM (SELECT totalprice FROM ORDERS ORDER BY totalprice LIMIT 1) T"); assertQuery("SELECT VAR_POP(totalprice) FROM (SELECT totalprice FROM ORDERS LIMIT 0) T"); } @Test public void testStdDev() throws Exception { // int64 assertQuery("SELECT STDDEV_SAMP(custkey) FROM ORDERS"); assertQuery("SELECT STDDEV_SAMP(custkey) FROM (SELECT custkey FROM ORDERS ORDER BY custkey LIMIT 2) T"); assertQuery("SELECT STDDEV_SAMP(custkey) FROM (SELECT custkey FROM ORDERS ORDER BY custkey LIMIT 1) T"); assertQuery("SELECT STDDEV_SAMP(custkey) FROM (SELECT custkey FROM ORDERS LIMIT 0) T"); // double assertQuery("SELECT STDDEV_SAMP(totalprice) FROM ORDERS"); assertQuery("SELECT STDDEV_SAMP(totalprice) FROM (SELECT totalprice FROM ORDERS ORDER BY totalprice LIMIT 2) T"); assertQuery("SELECT STDDEV_SAMP(totalprice) FROM (SELECT totalprice FROM ORDERS ORDER BY totalprice LIMIT 1) T"); assertQuery("SELECT STDDEV_SAMP(totalprice) FROM (SELECT totalprice FROM ORDERS LIMIT 0) T"); } @Test public void testStdDevPop() throws Exception { // int64 assertQuery("SELECT STDDEV_POP(custkey) FROM ORDERS"); assertQuery("SELECT STDDEV_POP(custkey) FROM (SELECT custkey FROM ORDERS ORDER BY custkey LIMIT 2) T"); assertQuery("SELECT STDDEV_POP(custkey) FROM (SELECT custkey FROM ORDERS ORDER BY custkey LIMIT 1) T"); assertQuery("SELECT STDDEV_POP(custkey) FROM (SELECT custkey FROM ORDERS LIMIT 0) T"); // double assertQuery("SELECT STDDEV_POP(totalprice) FROM ORDERS"); assertQuery("SELECT STDDEV_POP(totalprice) FROM (SELECT totalprice FROM ORDERS ORDER BY totalprice LIMIT 2) T"); assertQuery("SELECT STDDEV_POP(totalprice) FROM (SELECT totalprice FROM ORDERS ORDER BY totalprice LIMIT 1) T"); assertQuery("SELECT STDDEV_POP(totalprice) FROM (SELECT totalprice FROM ORDERS LIMIT 0) T"); } @Test public void testCountAllWithPredicate() throws Exception { assertQuery("SELECT COUNT(*) FROM ORDERS WHERE orderstatus = 'F'"); } @Test public void testGroupByNoAggregations() throws Exception { assertQuery("SELECT custkey FROM ORDERS GROUP BY custkey"); } @Test public void testGroupByCount() throws Exception { assertQuery( "SELECT orderstatus, COUNT(*) FROM ORDERS GROUP BY orderstatus", "SELECT orderstatus, CAST(COUNT(*) AS INTEGER) FROM orders GROUP BY orderstatus" ); } @Test public void testGroupByMultipleFields() throws Exception { assertQuery("SELECT custkey, orderstatus, COUNT(*) FROM ORDERS GROUP BY custkey, orderstatus"); } @Test public void testGroupByWithAlias() throws Exception { assertQuery( "SELECT orderdate x, COUNT(*) FROM orders GROUP BY orderdate", "SELECT orderdate x, CAST(COUNT(*) AS INTEGER) FROM orders GROUP BY orderdate" ); } @Test public void 
testGroupBySum() throws Exception { assertQuery("SELECT orderstatus, SUM(totalprice) FROM ORDERS GROUP BY orderstatus"); } @Test public void testGroupByWithWildcard() throws Exception { assertQuery("SELECT * FROM (SELECT orderkey FROM orders) t GROUP BY orderkey"); } @Test public void testCountAllWithComparison() throws Exception { assertQuery("SELECT COUNT(*) FROM lineitem WHERE tax < discount"); } @Test public void testSelectWithComparison() throws Exception { assertQuery("SELECT orderkey FROM lineitem WHERE tax < discount"); } @Test public void testCountWithNotPredicate() throws Exception { assertQuery("SELECT COUNT(*) FROM lineitem WHERE NOT tax < discount"); } @Test public void testCountWithNullPredicate() throws Exception { assertQuery("SELECT COUNT(*) FROM lineitem WHERE NULL"); } @Test public void testCountWithIsNullPredicate() throws Exception { assertQuery( "SELECT COUNT(*) FROM orders WHERE NULLIF(orderstatus, 'F') IS NULL", "SELECT COUNT(*) FROM orders WHERE orderstatus = 'F' " ); } @Test public void testCountWithIsNotNullPredicate() throws Exception { assertQuery( "SELECT COUNT(*) FROM orders WHERE NULLIF(orderstatus, 'F') IS NOT NULL", "SELECT COUNT(*) FROM orders WHERE orderstatus <> 'F' " ); } @Test public void testCountWithNullIfPredicate() throws Exception { assertQuery("SELECT COUNT(*) FROM orders WHERE NULLIF(orderstatus, 'F') = orderstatus "); } @Test public void testCountWithCoalescePredicate() throws Exception { assertQuery( "SELECT COUNT(*) FROM orders WHERE COALESCE(NULLIF(orderstatus, 'F'), 'bar') = 'bar'", "SELECT COUNT(*) FROM orders WHERE orderstatus = 'F'" ); } @Test public void testCountWithAndPredicate() throws Exception { assertQuery("SELECT COUNT(*) FROM lineitem WHERE tax < discount AND tax > 0.01 AND discount < 0.05"); } @Test public void testCountWithOrPredicate() throws Exception { assertQuery("SELECT COUNT(*) FROM lineitem WHERE tax < 0.01 OR discount > 0.05"); } @Test public void testCountWithInlineView() throws Exception { assertQuery("SELECT COUNT(*) FROM (SELECT orderkey FROM lineitem) x"); } @Test public void testNestedCount() throws Exception { assertQuery("SELECT COUNT(*) FROM (SELECT orderkey, COUNT(*) FROM lineitem GROUP BY orderkey) x"); } @Test public void testAggregationWithProjection() throws Exception { assertQuery("SELECT sum(totalprice * 2) - sum(totalprice) FROM orders"); } @Test public void testAggregationWithProjection2() throws Exception { assertQuery("SELECT sum(totalprice * 2) + sum(totalprice * 2) FROM orders"); } @Test public void testInlineView() throws Exception { assertQuery("SELECT orderkey, custkey FROM (SELECT orderkey, custkey FROM ORDERS) U"); } @Test public void testAliasedInInlineView() throws Exception { assertQuery("SELECT x, y FROM (SELECT orderkey x, custkey y FROM ORDERS) U"); } @Test public void testInlineViewWithProjections() throws Exception { assertQuery("SELECT x + 1, y FROM (SELECT orderkey * 10 x, custkey y FROM ORDERS) u"); } @Test public void testGroupByWithoutAggregation() throws Exception { assertQuery("SELECT orderstatus FROM orders GROUP BY orderstatus"); } @Test public void testHistogram() throws Exception { assertQuery("SELECT lines, COUNT(*) FROM (SELECT orderkey, COUNT(*) lines FROM lineitem GROUP BY orderkey) U GROUP BY lines"); } @Test public void testSimpleJoin() throws Exception { assertQuery("SELECT COUNT(*) FROM lineitem JOIN orders ON lineitem.orderkey = orders.orderkey"); } @Test public void testJoinWithRightConstantEquality() throws Exception { assertQuery("SELECT COUNT(*) FROM lineitem 
JOIN orders ON lineitem.orderkey = 2"); } @Test public void testJoinWithLeftConstantEquality() throws Exception { assertQuery("SELECT COUNT(*) FROM lineitem JOIN orders ON orders.orderkey = 2"); } @Test public void testSimpleJoinWithLeftConstantEquality() throws Exception { assertQuery("SELECT COUNT(*) FROM lineitem JOIN orders ON lineitem.orderkey = orders.orderkey AND orders.orderkey = 2"); } @Test public void testSimpleJoinWithRightConstantEquality() throws Exception { assertQuery("SELECT COUNT(*) FROM lineitem JOIN orders ON lineitem.orderkey = orders.orderkey AND lineitem.orderkey = 2"); } @Test public void testJoinDoubleClauseWithLeftOverlap() throws Exception { // Checks to make sure that we properly handle duplicate field references in join clauses assertQuery("SELECT COUNT(*) FROM lineitem JOIN orders ON lineitem.orderkey = orders.orderkey AND lineitem.orderkey = orders.custkey"); } @Test public void testJoinDoubleClauseWithRightOverlap() throws Exception { // Checks to make sure that we properly handle duplicate field references in join clauses assertQuery("SELECT COUNT(*) FROM lineitem JOIN orders ON lineitem.orderkey = orders.orderkey AND orders.orderkey = lineitem.partkey"); } @Test public void testJoinWithAlias() throws Exception { assertQuery("SELECT * FROM (lineitem JOIN orders ON lineitem.orderkey = orders.orderkey) x"); } @Test public void testJoinWithConstantExpression() throws Exception { assertQuery("SELECT COUNT(*) FROM lineitem JOIN orders ON lineitem.orderkey = orders.orderkey AND 123 = 123"); } @Test(expectedExceptions = Exception.class, expectedExceptionsMessageRegExp = ".*not supported.*") public void testJoinOnConstantExpression() throws Exception { assertQuery("SELECT COUNT(*) FROM lineitem JOIN orders ON 123 = 123"); } @Test public void testJoinUsing() throws Exception { assertQuery( "SELECT COUNT(*) FROM lineitem join orders using (orderkey)", "SELECT COUNT(*) FROM lineitem join orders on lineitem.orderkey = orders.orderkey" ); } @Test public void testJoinWithReversedComparison() throws Exception { assertQuery("SELECT COUNT(*) FROM lineitem JOIN orders ON orders.orderkey = lineitem.orderkey"); } @Test public void testJoinWithComplexExpressions() throws Exception { assertQuery("SELECT SUM(custkey) FROM lineitem JOIN orders ON lineitem.orderkey = CAST(orders.orderkey AS BIGINT)"); } @Test public void testJoinWithComplexExpressions2() throws Exception { assertQuery( "SELECT SUM(custkey) FROM lineitem JOIN orders ON lineitem.orderkey = CASE WHEN orders.custkey = 1 and orders.orderstatus = 'F' THEN orders.orderkey ELSE NULL END"); } @Test public void testJoinWithComplexExpressions3() throws Exception { assertQuery( "SELECT SUM(custkey) FROM lineitem JOIN orders ON lineitem.orderkey + 1 = orders.orderkey + 1", "SELECT SUM(custkey) FROM lineitem JOIN orders ON lineitem.orderkey = orders.orderkey " // H2 takes a million years because it can't join efficiently on a non-indexed field/expression ); } @Test public void testSelfJoin() throws Exception { assertQuery("SELECT COUNT(*) FROM orders a JOIN orders b on a.orderkey = b.orderkey"); } @Test public void testWildcardFromJoin() throws Exception { assertQuery( "SELECT * FROM (select orderkey, partkey from lineitem) a join (select orderkey, custkey from orders) b using (orderkey)", "SELECT * FROM (select orderkey, partkey from lineitem) a join (select orderkey, custkey from orders) b on a.orderkey = b.orderkey" ); } @Test public void testQualifiedWildcardFromJoin() throws Exception { assertQuery( "SELECT a.*, b.* FROM 
(select orderkey, partkey from lineitem) a join (select orderkey, custkey from orders) b using (orderkey)", "SELECT a.*, b.* FROM (select orderkey, partkey from lineitem) a join (select orderkey, custkey from orders) b on a.orderkey = b.orderkey" ); } @Test public void testJoinAggregations() throws Exception { assertQuery( "SELECT x + y FROM (" + " SELECT orderdate, COUNT(*) x FROM orders GROUP BY orderdate) a JOIN (" + " SELECT orderdate, COUNT(*) y FROM orders GROUP BY orderdate) b ON a.orderdate = b.orderdate"); } @Test public void testJoinOnMultipleFields() throws Exception { assertQuery("SELECT COUNT(*) FROM lineitem JOIN orders ON lineitem.orderkey = orders.orderkey AND lineitem.shipdate = orders.orderdate"); } @Test public void testJoinUsingMultipleFields() throws Exception { assertQuery( "SELECT COUNT(*) FROM lineitem JOIN (SELECT orderkey, orderdate shipdate FROM ORDERS) T USING (orderkey, shipdate)", "SELECT COUNT(*) FROM lineitem JOIN orders ON lineitem.orderkey = orders.orderkey AND lineitem.shipdate = orders.orderdate" ); } @Test public void testJoinWithNonJoinExpression() throws Exception { assertQuery("SELECT COUNT(*) FROM lineitem JOIN orders ON lineitem.orderkey = orders.orderkey AND orders.custkey = 1"); } @Test public void testLeftFilteredJoin() throws Exception { // Test predicate move around assertQuery("SELECT custkey, linestatus, tax, totalprice, orderstatus FROM (SELECT * FROM lineitem WHERE orderkey % 2 = 0) a JOIN orders ON a.orderkey = orders.orderkey"); } @Test public void testRightFilteredJoin() throws Exception { // Test predicate move around assertQuery("SELECT custkey, linestatus, tax, totalprice, orderstatus FROM lineitem JOIN (SELECT * FROM orders WHERE orderkey % 2 = 0) a ON lineitem.orderkey = a.orderkey"); } @Test public void testJoinWithFullyPushedDownJoinClause() throws Exception { assertQuery("SELECT COUNT(*) FROM lineitem JOIN orders ON orders.custkey = 1 AND lineitem.orderkey = 1"); } @Test public void testJoinPredicateMoveAround() throws Exception { assertQuery("SELECT COUNT(*)\n" + "FROM (SELECT * FROM lineitem WHERE orderkey % 16 = 0 AND partkey % 2 = 0) lineitem\n" + "JOIN (SELECT * FROM orders WHERE orderkey % 16 = 0 AND custkey % 2 = 0) orders\n" + "ON lineitem.orderkey % 8 = orders.orderkey % 8 AND lineitem.linenumber % 2 = 0\n" + "WHERE orders.custkey % 8 < 7 AND orders.custkey % 8 = lineitem.orderkey % 8 AND lineitem.suppkey % 7 > orders.custkey % 7"); } @Test public void testSimpleLeftJoin() throws Exception { assertQuery("SELECT COUNT(*) FROM lineitem LEFT JOIN orders ON lineitem.orderkey = orders.orderkey"); assertQuery("SELECT COUNT(*) FROM lineitem LEFT OUTER JOIN orders ON lineitem.orderkey = orders.orderkey"); } @Test public void testLeftJoinNormalizedToInner() throws Exception { assertQuery("SELECT COUNT(*) FROM lineitem LEFT JOIN orders ON lineitem.orderkey = orders.orderkey WHERE orders.orderkey IS NOT NULL"); } @Test public void testLeftJoinWithRightConstantEquality() throws Exception { assertQuery("SELECT COUNT(*) FROM (SELECT * FROM lineitem WHERE orderkey % 1024 = 0) lineitem LEFT JOIN orders ON lineitem.orderkey = 1024"); } @Test public void testLeftJoinWithLeftConstantEquality() throws Exception { assertQuery("SELECT COUNT(*) FROM (SELECT * FROM lineitem WHERE orderkey % 1024 = 0) lineitem LEFT JOIN orders ON orders.orderkey = 1024"); } @Test public void testSimpleLeftJoinWithLeftConstantEquality() throws Exception { assertQuery("SELECT COUNT(*) FROM lineitem LEFT JOIN orders ON lineitem.orderkey = orders.orderkey AND 
orders.orderkey = 2"); } @Test public void testSimpleLeftJoinWithRightConstantEquality() throws Exception { assertQuery("SELECT COUNT(*) FROM lineitem LEFT JOIN orders ON lineitem.orderkey = orders.orderkey AND lineitem.orderkey = 2"); } @Test public void testDoubleFilteredLeftJoinWithRightConstantEquality() throws Exception { assertQuery("SELECT COUNT(*) FROM (SELECT * FROM lineitem WHERE orderkey % 1024 = 0) lineitem LEFT JOIN (SELECT * FROM orders WHERE orderkey % 1024 = 0) orders ON orders.orderkey = 1024"); } @Test public void testDoubleFilteredLeftJoinWithLeftConstantEquality() throws Exception { assertQuery("SELECT COUNT(*) FROM (SELECT * FROM lineitem WHERE orderkey % 1024 = 0) lineitem LEFT JOIN (SELECT * FROM orders WHERE orderkey % 1024 = 0) orders ON lineitem.orderkey = 1024"); } @Test public void testLeftJoinDoubleClauseWithLeftOverlap() throws Exception { // Checks to make sure that we properly handle duplicate field references in join clauses assertQuery("SELECT COUNT(*) FROM lineitem LEFT JOIN orders ON lineitem.orderkey = orders.orderkey AND lineitem.orderkey = orders.custkey"); } @Test public void testLeftJoinDoubleClauseWithRightOverlap() throws Exception { // Checks to make sure that we properly handle duplicate field references in join clauses assertQuery("SELECT COUNT(*) FROM lineitem LEFT JOIN orders ON lineitem.orderkey = orders.orderkey AND orders.orderkey = lineitem.partkey"); } @Test public void testBuildFilteredLeftJoin() throws Exception { assertQuery("SELECT * FROM lineitem LEFT JOIN (SELECT * FROM orders WHERE orderkey % 2 = 0) a ON lineitem.orderkey = a.orderkey"); } @Test public void testProbeFilteredLeftJoin() throws Exception { assertQuery("SELECT * FROM (SELECT * FROM lineitem WHERE orderkey % 2 = 0) a LEFT JOIN orders ON a.orderkey = orders.orderkey"); } @Test public void testLeftJoinPredicateMoveAround() throws Exception { assertQuery("SELECT COUNT(*)\n" + "FROM (SELECT * FROM lineitem WHERE orderkey % 16 = 0 AND partkey % 2 = 0) lineitem\n" + "LEFT JOIN (SELECT * FROM orders WHERE orderkey % 16 = 0 AND custkey % 2 = 0) orders\n" + "ON lineitem.orderkey % 8 = orders.orderkey % 8\n" + "WHERE (orders.custkey % 8 < 7 OR orders.custkey % 8 IS NULL) AND orders.custkey % 8 = lineitem.orderkey % 8"); } @Test public void testLeftJoinEqualityInference() throws Exception { // Test that we can infer orders.orderkey % 4 = orders.custkey % 3 on the inner side assertQuery("SELECT COUNT(*)\n" + "FROM (SELECT * FROM lineitem WHERE orderkey % 4 = 0 AND suppkey % 2 = partkey % 2 AND linenumber % 3 = orderkey % 3) lineitem\n" + "LEFT JOIN (SELECT * FROM orders WHERE orderkey % 4 = 0) orders\n" + "ON lineitem.linenumber % 3 = orders.orderkey % 4 AND lineitem.orderkey % 3 = orders.custkey % 3\n" + "WHERE lineitem.suppkey % 2 = lineitem.linenumber % 3"); } @Test public void testSimpleRightJoin() throws Exception { assertQuery("SELECT COUNT(*) FROM lineitem RIGHT JOIN orders ON lineitem.orderkey = orders.orderkey"); assertQuery("SELECT COUNT(*) FROM lineitem RIGHT OUTER JOIN orders ON lineitem.orderkey = orders.orderkey"); } @Test public void testRightJoinNormalizedToInner() throws Exception { assertQuery("SELECT COUNT(*) FROM lineitem RIGHT JOIN orders ON lineitem.orderkey = orders.orderkey WHERE lineitem.orderkey IS NOT NULL"); } @Test public void testRightJoinWithRightConstantEquality() throws Exception { assertQuery("SELECT COUNT(*) FROM (SELECT * FROM lineitem WHERE orderkey % 1024 = 0) lineitem RIGHT JOIN orders ON lineitem.orderkey = 1024"); } @Test public void 
testRightJoinWithLeftConstantEquality() throws Exception { assertQuery("SELECT COUNT(*) FROM (SELECT * FROM lineitem WHERE orderkey % 1024 = 0) lineitem RIGHT JOIN orders ON orders.orderkey = 1024"); } @Test public void testDoubleFilteredRightJoinWithRightConstantEquality() throws Exception { assertQuery("SELECT COUNT(*) FROM (SELECT * FROM lineitem WHERE orderkey % 1024 = 0) lineitem RIGHT JOIN (SELECT * FROM orders WHERE orderkey % 1024 = 0) orders ON orders.orderkey = 1024"); } @Test public void testDoubleFilteredRightJoinWithLeftConstantEquality() throws Exception { assertQuery("SELECT COUNT(*) FROM (SELECT * FROM lineitem WHERE orderkey % 1024 = 0) lineitem RIGHT JOIN (SELECT * FROM orders WHERE orderkey % 1024 = 0) orders ON lineitem.orderkey = 1024"); } @Test public void testSimpleRightJoinWithLeftConstantEquality() throws Exception { assertQuery("SELECT COUNT(*) FROM lineitem RIGHT JOIN orders ON lineitem.orderkey = orders.orderkey AND orders.orderkey = 2"); } @Test public void testSimpleRightJoinWithRightConstantEquality() throws Exception { assertQuery("SELECT COUNT(*) FROM lineitem RIGHT JOIN orders ON lineitem.orderkey = orders.orderkey AND lineitem.orderkey = 2"); } @Test public void testRightJoinDoubleClauseWithLeftOverlap() throws Exception { // Checks to make sure that we properly handle duplicate field references in join clauses assertQuery("SELECT COUNT(*) FROM lineitem RIGHT JOIN orders ON lineitem.orderkey = orders.orderkey AND lineitem.orderkey = orders.custkey"); } @Test public void testRightJoinDoubleClauseWithRightOverlap() throws Exception { // Checks to make sure that we properly handle duplicate field references in join clauses assertQuery("SELECT COUNT(*) FROM lineitem RIGHT JOIN orders ON lineitem.orderkey = orders.orderkey AND orders.orderkey = lineitem.partkey"); } @Test public void testBuildFilteredRightJoin() throws Exception { assertQuery("SELECT custkey, linestatus, tax, totalprice, orderstatus FROM (SELECT * FROM lineitem WHERE orderkey % 2 = 0) a RIGHT JOIN orders ON a.orderkey = orders.orderkey"); } @Test public void testProbeFilteredRightJoin() throws Exception { assertQuery("SELECT custkey, linestatus, tax, totalprice, orderstatus FROM lineitem RIGHT JOIN (SELECT * FROM orders WHERE orderkey % 2 = 0) a ON lineitem.orderkey = a.orderkey"); } @Test public void testRightJoinPredicateMoveAround() throws Exception { assertQuery("SELECT COUNT(*)\n" + "FROM (SELECT * FROM orders WHERE orderkey % 16 = 0 AND custkey % 2 = 0) orders\n" + "RIGHT JOIN (SELECT * FROM lineitem WHERE orderkey % 16 = 0 AND partkey % 2 = 0) lineitem\n" + "ON lineitem.orderkey % 8 = orders.orderkey % 8\n" + "WHERE (orders.custkey % 8 < 7 OR orders.custkey % 8 IS NULL) AND orders.custkey % 8 = lineitem.orderkey % 8"); } @Test public void testRightJoinEqualityInference() throws Exception { // Test that we can infer orders.orderkey % 4 = orders.custkey % 3 on the inner side assertQuery("SELECT COUNT(*)\n" + "FROM (SELECT * FROM orders WHERE orderkey % 4 = 0) orders\n" + "RIGHT JOIN (SELECT * FROM lineitem WHERE orderkey % 4 = 0 AND suppkey % 2 = partkey % 2 AND linenumber % 3 = orderkey % 3) lineitem\n" + "ON lineitem.linenumber % 3 = orders.orderkey % 4 AND lineitem.orderkey % 3 = orders.custkey % 3\n" + "WHERE lineitem.suppkey % 2 = lineitem.linenumber % 3"); } @Test public void testOrderBy() throws Exception { assertQueryOrdered("SELECT orderstatus FROM orders ORDER BY orderstatus"); } @Test public void testOrderBy2() throws Exception { assertQueryOrdered("SELECT orderstatus FROM 
orders ORDER BY orderkey DESC"); } @Test public void testOrderByMultipleFields() throws Exception { assertQueryOrdered("SELECT custkey, orderstatus FROM orders ORDER BY custkey DESC, orderstatus"); } @Test public void testOrderByAlias() throws Exception { assertQueryOrdered("SELECT orderstatus x FROM orders ORDER BY x ASC"); } @Test public void testOrderByAliasWithSameNameAsUnselectedColumn() throws Exception { assertQueryOrdered("SELECT orderstatus orderdate FROM orders ORDER BY orderdate ASC"); } @Test public void testOrderByOrdinal() throws Exception { assertQueryOrdered("SELECT orderstatus, orderdate FROM orders ORDER BY 2, 1"); } @Test public void testOrderByOrdinalWithWildcard() throws Exception { assertQueryOrdered("SELECT * FROM orders ORDER BY 1"); } @Test public void testGroupByOrdinal() throws Exception { assertQuery( "SELECT orderstatus, sum(totalprice) FROM orders GROUP BY 1", "SELECT orderstatus, sum(totalprice) FROM orders GROUP BY orderstatus"); } @Test public void testGroupBySearchedCase() throws Exception { assertQuery("SELECT CASE WHEN orderstatus = 'O' THEN 'a' ELSE 'b' END, count(*)\n" + "FROM orders\n" + "GROUP BY CASE WHEN orderstatus = 'O' THEN 'a' ELSE 'b' END"); assertQuery( "SELECT CASE WHEN orderstatus = 'O' THEN 'a' ELSE 'b' END, count(*)\n" + "FROM orders\n" + "GROUP BY 1", "SELECT CASE WHEN orderstatus = 'O' THEN 'a' ELSE 'b' END, count(*)\n" + "FROM orders\n" + "GROUP BY CASE WHEN orderstatus = 'O' THEN 'a' ELSE 'b' END"); } @Test public void testGroupBySearchedCaseNoElse() throws Exception { // whole CASE in group by clause assertQuery("SELECT CASE WHEN orderstatus = 'O' THEN 'a' END, count(*)\n" + "FROM orders\n" + "GROUP BY CASE WHEN orderstatus = 'O' THEN 'a' END"); assertQuery( "SELECT CASE WHEN orderstatus = 'O' THEN 'a' END, count(*)\n" + "FROM orders\n" + "GROUP BY 1", "SELECT CASE WHEN orderstatus = 'O' THEN 'a' END, count(*)\n" + "FROM orders\n" + "GROUP BY CASE WHEN orderstatus = 'O' THEN 'a' END"); assertQuery("SELECT CASE WHEN true THEN orderstatus END, count(*)\n" + "FROM orders\n" + "GROUP BY orderstatus"); } @Test public void testGroupByCase() throws Exception { // whole CASE in group by clause assertQuery("SELECT CASE orderstatus WHEN 'O' THEN 'a' ELSE 'b' END, count(*)\n" + "FROM orders\n" + "GROUP BY CASE orderstatus WHEN 'O' THEN 'a' ELSE 'b' END"); assertQuery( "SELECT CASE orderstatus WHEN 'O' THEN 'a' ELSE 'b' END, count(*)\n" + "FROM orders\n" + "GROUP BY 1", "SELECT CASE orderstatus WHEN 'O' THEN 'a' ELSE 'b' END, count(*)\n" + "FROM orders\n" + "GROUP BY CASE orderstatus WHEN 'O' THEN 'a' ELSE 'b' END"); // operand in group by clause assertQuery("SELECT CASE orderstatus WHEN 'O' THEN 'a' ELSE 'b' END, count(*)\n" + "FROM orders\n" + "GROUP BY orderstatus"); // condition in group by clause assertQuery("SELECT CASE 'O' WHEN orderstatus THEN 'a' ELSE 'b' END, count(*)\n" + "FROM orders\n" + "GROUP BY orderstatus"); // 'then' in group by clause assertQuery("SELECT CASE 1 WHEN 1 THEN orderstatus ELSE 'x' END, count(*)\n" + "FROM orders\n" + "GROUP BY orderstatus"); // 'else' in group by clause assertQuery("SELECT CASE 1 WHEN 1 THEN 'x' ELSE orderstatus END, count(*)\n" + "FROM orders\n" + "GROUP BY orderstatus"); } @Test public void testGroupByCaseNoElse() throws Exception { // whole CASE in group by clause assertQuery("SELECT CASE orderstatus WHEN 'O' THEN 'a' END, count(*)\n" + "FROM orders\n" + "GROUP BY CASE orderstatus WHEN 'O' THEN 'a' END"); // operand in group by clause assertQuery("SELECT CASE orderstatus WHEN 'O' THEN 'a' END, 
count(*)\n" + "FROM orders\n" + "GROUP BY orderstatus"); // condition in group by clause assertQuery("SELECT CASE 'O' WHEN orderstatus THEN 'a' END, count(*)\n" + "FROM orders\n" + "GROUP BY orderstatus"); // 'then' in group by clause assertQuery("SELECT CASE 1 WHEN 1 THEN orderstatus END, count(*)\n" + "FROM orders\n" + "GROUP BY orderstatus"); } @Test public void testGroupByCast() throws Exception { // whole CAST in group by expression assertQuery("SELECT CAST(orderkey AS VARCHAR), count(*) FROM orders GROUP BY CAST(orderkey AS VARCHAR)"); assertQuery( "SELECT CAST(orderkey AS VARCHAR), count(*) FROM orders GROUP BY 1", "SELECT CAST(orderkey AS VARCHAR), count(*) FROM orders GROUP BY CAST(orderkey AS VARCHAR)"); // argument in group by expression assertQuery("SELECT CAST(orderkey AS VARCHAR), count(*) FROM orders GROUP BY orderkey"); } @Test public void testGroupByCoalesce() throws Exception { // whole COALESCE in group by assertQuery("SELECT COALESCE(orderkey, custkey), count(*) FROM orders GROUP BY COALESCE(orderkey, custkey)"); assertQuery( "SELECT COALESCE(orderkey, custkey), count(*) FROM orders GROUP BY 1", "SELECT COALESCE(orderkey, custkey), count(*) FROM orders GROUP BY COALESCE(orderkey, custkey)" ); // operands in group by assertQuery("SELECT COALESCE(orderkey, 1), count(*) FROM orders GROUP BY orderkey"); // operands in group by assertQuery("SELECT COALESCE(1, orderkey), count(*) FROM orders GROUP BY orderkey"); } @Test public void testGroupByNullIf() throws Exception { // whole NULLIF in group by assertQuery("SELECT NULLIF(orderkey, custkey), count(*) FROM orders GROUP BY NULLIF(orderkey, custkey)"); assertQuery( "SELECT NULLIF(orderkey, custkey), count(*) FROM orders GROUP BY 1", "SELECT NULLIF(orderkey, custkey), count(*) FROM orders GROUP BY NULLIF(orderkey, custkey)"); // first operand in group by assertQuery("SELECT NULLIF(orderkey, 1), count(*) FROM orders GROUP BY orderkey"); // second operand in group by assertQuery("SELECT NULLIF(1, orderkey), count(*) FROM orders GROUP BY orderkey"); } @Test public void testGroupByExtract() throws Exception { // whole expression in group by assertQuery("SELECT EXTRACT(YEAR FROM now()), count(*) FROM orders GROUP BY EXTRACT(YEAR FROM now())"); assertQuery( "SELECT EXTRACT(YEAR FROM now()), count(*) FROM orders GROUP BY 1", "SELECT EXTRACT(YEAR FROM now()), count(*) FROM orders GROUP BY EXTRACT(YEAR FROM now())"); // argument in group by assertQuery("SELECT EXTRACT(YEAR FROM now()), count(*) FROM orders GROUP BY now()"); } @Test public void testGroupByBetween() throws Exception { // whole expression in group by assertQuery("SELECT orderkey BETWEEN 1 AND 100 FROM orders GROUP BY orderkey BETWEEN 1 AND 100 "); // expression in group by assertQuery("SELECT CAST(orderkey BETWEEN 1 AND 100 AS BIGINT) FROM orders GROUP BY orderkey"); // min in group by assertQuery("SELECT CAST(50 BETWEEN orderkey AND 100 AS BIGINT) FROM orders GROUP BY orderkey"); // max in group by assertQuery("SELECT CAST(50 BETWEEN 1 AND orderkey AS BIGINT) FROM orders GROUP BY orderkey"); } @Test public void testHaving() throws Exception { assertQuery("SELECT orderstatus, sum(totalprice) FROM orders GROUP BY orderstatus HAVING orderstatus = 'O'"); } @Test public void testHaving2() throws Exception { assertQuery("SELECT custkey, sum(orderkey) FROM orders GROUP BY custkey HAVING sum(orderkey) > 400000"); } @Test public void testHaving3() throws Exception { assertQuery("SELECT custkey, sum(totalprice) * 2 FROM orders GROUP BY custkey HAVING avg(totalprice + 5) > 10"); } 
@Test public void testColumnAliases() throws Exception { assertQuery( "SELECT x, T.y, z + 1 FROM (SELECT custkey, orderstatus, totalprice FROM orders) T (x, y, z)", "SELECT custkey, orderstatus, totalprice + 1 FROM orders"); } @Test public void testSameInputToAggregates() throws Exception { assertQuery("SELECT max(a), max(b) FROM (SELECT custkey a, custkey b FROM orders) x"); } @SuppressWarnings("PointlessArithmeticExpression") @Test public void testWindowFunctionsExpressions() { MaterializedResult actual = computeActual("" + "SELECT orderkey, orderstatus\n" + ", row_number() OVER (ORDER BY orderkey * 2) *\n" + " row_number() OVER (ORDER BY orderkey DESC) + 100\n" + "FROM (SELECT * FROM orders ORDER BY orderkey LIMIT 10) x\n" + "ORDER BY orderkey LIMIT 5"); MaterializedResult expected = resultBuilder(FIXED_INT_64, VARIABLE_BINARY, FIXED_INT_64) .row(1, "O", (1 * 10) + 100) .row(2, "O", (2 * 9) + 100) .row(3, "F", (3 * 8) + 100) .row(4, "O", (4 * 7) + 100) .row(5, "F", (5 * 6) + 100) .build(); assertEquals(actual, expected); } @Test public void testWindowFunctionsFromAggregate() throws Exception { MaterializedResult actual = computeActual("" + "SELECT * FROM (\n" + " SELECT orderstatus, clerk, sales\n" + " , rank() OVER (PARTITION BY x.orderstatus ORDER BY sales DESC) rnk\n" + " FROM (\n" + " SELECT orderstatus, clerk, sum(totalprice) sales\n" + " FROM orders\n" + " GROUP BY orderstatus, clerk\n" + " ) x\n" + ") x\n" + "WHERE rnk <= 2\n" + "ORDER BY orderstatus, rnk"); MaterializedResult expected = resultBuilder(VARIABLE_BINARY, VARIABLE_BINARY, DOUBLE, FIXED_INT_64) .row("F", "Clerk#000000090", 2784836.61, 1) .row("F", "Clerk#000000084", 2674447.15, 2) .row("O", "Clerk#000000500", 2569878.29, 1) .row("O", "Clerk#000000050", 2500162.92, 2) .row("P", "Clerk#000000071", 841820.99, 1) .row("P", "Clerk#000001000", 643679.49, 2) .build(); assertEquals(actual, expected); } @Test public void testOrderByWindowFunction() throws Exception { MaterializedResult actual = computeActual("" + "SELECT orderkey, row_number() OVER (ORDER BY orderkey)\n" + "FROM (SELECT * FROM orders ORDER BY orderkey LIMIT 10)\n" + "ORDER BY 2 DESC\n" + "LIMIT 5"); MaterializedResult expected = resultBuilder(FIXED_INT_64, FIXED_INT_64) .row(34, 10) .row(33, 9) .row(32, 8) .row(7, 7) .row(6, 6) .build(); assertEquals(actual, expected); } @Test public void testScalarFunction() throws Exception { assertQuery("SELECT SUBSTR('Quadratically', 5, 6) FROM orders LIMIT 1"); } @Test public void testCast() throws Exception { assertQuery("SELECT CAST('1' AS BIGINT) FROM orders"); assertQuery("SELECT CAST(totalprice AS BIGINT) FROM orders"); assertQuery("SELECT CAST(orderkey AS DOUBLE) FROM orders"); assertQuery("SELECT CAST(orderkey AS VARCHAR) FROM orders"); assertQuery("SELECT CAST(orderkey AS BOOLEAN) FROM orders"); } @Test public void testConcatOperator() throws Exception { assertQuery("SELECT '12' || '34' FROM orders LIMIT 1"); } @Test public void testQuotedIdentifiers() throws Exception { assertQuery("SELECT \"TOTALPRICE\" \"my price\" FROM \"ORDERS\""); } @Test(expectedExceptions = Exception.class, expectedExceptionsMessageRegExp = ".*orderkey_1.*") public void testInvalidColumn() throws Exception { computeActual("select * from lineitem l join (select orderkey_1, custkey from orders) o on l.orderkey = o.orderkey_1"); } @Test public void testUnaliasedSubqueries() throws Exception { assertQuery("SELECT orderkey FROM (SELECT orderkey FROM orders)"); } @Test public void testUnaliasedSubqueries1() throws Exception { assertQuery("SELECT 
a FROM (SELECT orderkey a FROM orders)"); } @Test public void testJoinUnaliasedSubqueries() throws Exception { assertQuery( "SELECT COUNT(*) FROM (SELECT * FROM lineitem) join (SELECT * FROM orders) using (orderkey)", "SELECT COUNT(*) FROM lineitem join orders on lineitem.orderkey = orders.orderkey" ); } @Test public void testWith() throws Exception { assertQuery("" + "WITH a AS (SELECT * FROM orders) " + "SELECT * FROM a", "SELECT * FROM orders"); } @Test public void testWithQualifiedPrefix() throws Exception { assertQuery("" + "WITH a AS (SELECT 123 FROM orders LIMIT 1)" + "SELECT a.* FROM a", "SELECT 123 FROM orders LIMIT 1"); } @Test public void testWithAliased() throws Exception { assertQuery("" + "WITH a AS (SELECT * FROM orders) " + "SELECT * FROM a x", "SELECT * FROM orders"); } @Test public void testReferenceToWithQueryInFromClause() throws Exception { assertQuery( "WITH a AS (SELECT * FROM orders)" + "SELECT * FROM (" + " SELECT * FROM a" + ")", "SELECT * FROM orders"); } @Test public void testWithChaining() throws Exception { assertQuery("" + "WITH a AS (SELECT orderkey n FROM orders)\n" + ", b AS (SELECT n + 1 n FROM a)\n" + ", c AS (SELECT n + 1 n FROM b)\n" + "SELECT n + 1 FROM c", "SELECT orderkey + 3 FROM orders"); } @Test public void testWithSelfJoin() throws Exception { assertQuery("" + "WITH x AS (SELECT DISTINCT orderkey FROM orders ORDER BY orderkey LIMIT 10)\n" + "SELECT count(*) FROM x a JOIN x b USING (orderkey)", "" + "SELECT count(*)\n" + "FROM (SELECT DISTINCT orderkey FROM orders ORDER BY orderkey LIMIT 10) a\n" + "JOIN (SELECT DISTINCT orderkey FROM orders ORDER BY orderkey LIMIT 10) b ON a.orderkey = b.orderkey"); } @Test public void testWithNestedSubqueries() throws Exception { assertQuery("" + "WITH a AS (\n" + " WITH aa AS (SELECT 123 x FROM orders LIMIT 1)\n" + " SELECT x y FROM aa\n" + "), b AS (\n" + " WITH bb AS (\n" + " WITH bbb AS (SELECT y FROM a)\n" + " SELECT bbb.* FROM bbb\n" + " )\n" + " SELECT y z FROM bb\n" + ")\n" + "SELECT *\n" + "FROM (\n" + " WITH q AS (SELECT z w FROM b)\n" + " SELECT j.*, k.*\n" + " FROM a j\n" + " JOIN q k ON (j.y = k.w)\n" + ") t", "" + "SELECT 123, 123 FROM orders LIMIT 1"); } @Test(enabled = false) public void testWithColumnAliasing() throws Exception { assertQuery( "WITH a (id) AS (SELECT 123 FROM orders LIMIT 1) SELECT * FROM a", "SELECT 123 FROM orders LIMIT 1"); } @Test public void testWithHiding() throws Exception { assertQuery("" + "WITH a AS (SELECT custkey FROM orders), " + " b AS (" + " WITH a AS (SELECT orderkey FROM orders)" + " SELECT * FROM a" + // should refer to inner 'a' " )" + "SELECT * FROM b", "SELECT orderkey FROM orders" ); } @Test(expectedExceptions = RuntimeException.class, expectedExceptionsMessageRegExp = "Recursive WITH queries are not supported") public void testWithRecursive() throws Exception { computeActual("WITH RECURSIVE a AS (SELECT 123 FROM dual) SELECT * FROM a"); } @Test public void testCaseNoElse() throws Exception { assertQuery("SELECT orderkey, CASE orderstatus WHEN 'O' THEN 'a' END FROM orders"); } @Test public void testIfExpression() throws Exception { assertQuery( "SELECT sum(IF(orderstatus = 'F', totalprice, 0.0)) FROM orders", "SELECT sum(CASE WHEN orderstatus = 'F' THEN totalprice ELSE 0.0 END) FROM orders"); assertQuery( "SELECT sum(IF(orderstatus = 'Z', totalprice)) FROM orders", "SELECT sum(CASE WHEN orderstatus = 'Z' THEN totalprice END) FROM orders"); assertQuery( "SELECT sum(IF(orderstatus = 'F', NULL, totalprice)) FROM orders", "SELECT sum(CASE WHEN orderstatus = 'F' 
THEN NULL ELSE totalprice END) FROM orders"); assertQuery( "SELECT IF(orderstatus = 'Z', orderkey / 0, orderkey) FROM orders", "SELECT CASE WHEN orderstatus = 'Z' THEN orderkey / 0 ELSE orderkey END FROM orders"); assertQuery( "SELECT sum(IF(NULLIF(orderstatus, 'F') <> 'F', totalprice, 5.1)) FROM orders", "SELECT sum(CASE WHEN NULLIF(orderstatus, 'F') <> 'F' THEN totalprice ELSE 5.1 END) FROM orders"); } @Test public void testIn() throws Exception { assertQuery("SELECT orderkey FROM orders WHERE orderkey IN (1, 2, 3)"); assertQuery("SELECT orderkey FROM orders WHERE orderkey IN (1.5, 2.3)"); assertQuery("SELECT orderkey FROM orders WHERE totalprice IN (1, 2, 3)"); } @Test public void testGroupByIf() throws Exception { assertQuery( "SELECT IF(orderkey between 1 and 5, 'orders', 'others'), sum(totalprice) FROM orders GROUP BY 1", "SELECT CASE WHEN orderkey BETWEEN 1 AND 5 THEN 'orders' ELSE 'others' END, sum(totalprice)\n" + "FROM orders\n" + "GROUP BY CASE WHEN orderkey BETWEEN 1 AND 5 THEN 'orders' ELSE 'others' END"); } @Test public void testDuplicateFields() throws Exception { assertQuery( "SELECT * FROM (SELECT orderkey, orderkey FROM orders)", "SELECT orderkey, orderkey FROM orders"); } @Test public void testWildcardFromSubquery() throws Exception { assertQuery("SELECT * FROM (SELECT orderkey X FROM orders)"); } @Test public void testCaseInsensitiveOutputAliasInOrderBy() throws Exception { assertQueryOrdered("SELECT orderkey X FROM orders ORDER BY x"); } @Test public void testCaseInsensitiveAttribute() throws Exception { assertQuery("SELECT x FROM (SELECT orderkey X FROM orders)"); } @Test public void testCaseInsensitiveAliasedRelation() throws Exception { assertQuery("SELECT A.* FROM orders a"); } @Test public void testSubqueryBody() throws Exception { assertQuery("(SELECT orderkey, custkey FROM ORDERS)"); } @Test public void testSubqueryBodyOrderLimit() throws Exception { assertQueryOrdered("(SELECT orderkey AS a, custkey AS b FROM ORDERS) ORDER BY a LIMIT 1"); } @Test public void testSubqueryBodyProjectedOrderby() throws Exception { assertQueryOrdered("(SELECT orderkey, custkey FROM ORDERS) ORDER BY orderkey * -1"); } @Test public void testSubqueryBodyDoubleOrderby() throws Exception { assertQueryOrdered("(SELECT orderkey, custkey FROM ORDERS ORDER BY custkey) ORDER BY orderkey"); } @Test public void testNodeRoster() throws Exception { List<MaterializedTuple> result = computeActual("SELECT * FROM sys.node").getMaterializedTuples(); assertEquals(result.size(), getNodeCount()); } @Test public void testDual() throws Exception { MaterializedResult result = computeActual("SELECT * FROM dual"); List<MaterializedTuple> tuples = result.getMaterializedTuples(); assertEquals(tuples.size(), 1); } @Test public void testDefaultExplainTextFormat() { String query = "SELECT 123 FROM dual"; MaterializedResult result = computeActual("EXPLAIN " + query); String actual = Iterables.getOnlyElement(transform(result.getMaterializedTuples(), onlyColumnGetter())); assertEquals(actual, getExplainPlan(query, LOGICAL)); } @Test public void testDefaultExplainGraphvizFormat() { String query = "SELECT 123 FROM dual"; MaterializedResult result = computeActual("EXPLAIN (FORMAT GRAPHVIZ) " + query); String actual = Iterables.getOnlyElement(transform(result.getMaterializedTuples(), onlyColumnGetter())); assertEquals(actual, getGraphvizExplainPlan(query, LOGICAL)); } @Test public void testLogicalExplain() { String query = "SELECT 123 FROM dual"; MaterializedResult result = computeActual("EXPLAIN (TYPE LOGICAL) " + 
query); String actual = Iterables.getOnlyElement(transform(result.getMaterializedTuples(), onlyColumnGetter())); assertEquals(actual, getExplainPlan(query, LOGICAL)); } @Test public void testLogicalExplainTextFormat() { String query = "SELECT 123 FROM dual"; MaterializedResult result = computeActual("EXPLAIN (TYPE LOGICAL, FORMAT TEXT) " + query); String actual = Iterables.getOnlyElement(transform(result.getMaterializedTuples(), onlyColumnGetter())); assertEquals(actual, getExplainPlan(query, LOGICAL)); } @Test public void testLogicalExplainGraphvizFormat() { String query = "SELECT 123 FROM dual"; MaterializedResult result = computeActual("EXPLAIN (TYPE LOGICAL, FORMAT GRAPHVIZ) " + query); String actual = Iterables.getOnlyElement(transform(result.getMaterializedTuples(), onlyColumnGetter())); assertEquals(actual, getGraphvizExplainPlan(query, LOGICAL)); } @Test public void testDistributedExplain() { String query = "SELECT 123 FROM dual"; MaterializedResult result = computeActual("EXPLAIN (TYPE DISTRIBUTED) " + query); String actual = Iterables.getOnlyElement(transform(result.getMaterializedTuples(), onlyColumnGetter())); assertEquals(actual, getExplainPlan(query, DISTRIBUTED)); } @Test public void testDistributedExplainTextFormat() { String query = "SELECT 123 FROM dual"; MaterializedResult result = computeActual("EXPLAIN (TYPE DISTRIBUTED, FORMAT TEXT) " + query); String actual = Iterables.getOnlyElement(transform(result.getMaterializedTuples(), onlyColumnGetter())); assertEquals(actual, getExplainPlan(query, DISTRIBUTED)); } @Test public void testDistributedExplainGraphvizFormat() { String query = "SELECT 123 FROM dual"; MaterializedResult result = computeActual("EXPLAIN (TYPE DISTRIBUTED, FORMAT GRAPHVIZ) " + query); String actual = Iterables.getOnlyElement(transform(result.getMaterializedTuples(), onlyColumnGetter())); assertEquals(actual, getGraphvizExplainPlan(query, DISTRIBUTED)); } @Test public void testShowSchemas() throws Exception { MaterializedResult result = computeActual("SHOW SCHEMAS"); ImmutableSet<String> schemaNames = ImmutableSet.copyOf(transform(result.getMaterializedTuples(), onlyColumnGetter())); assertTrue(schemaNames.containsAll(ImmutableSet.of(TPCH_SCHEMA_NAME, InformationSchemaMetadata.INFORMATION_SCHEMA, "node"))); } @Test public void testShowTables() throws Exception { MaterializedResult result = computeActual("SHOW TABLES"); ImmutableSet<String> tableNames = ImmutableSet.copyOf(transform(result.getMaterializedTuples(), onlyColumnGetter())); assertEquals(tableNames, ImmutableSet.of(TPCH_ORDERS_NAME, TPCH_LINEITEM_NAME)); } @Test public void testShowTablesFrom() throws Exception { MaterializedResult result = computeActual("SHOW TABLES FROM DEFAULT"); ImmutableSet<String> tableNames = ImmutableSet.copyOf(transform(result.getMaterializedTuples(), onlyColumnGetter())); assertEquals(tableNames, ImmutableSet.of(TPCH_ORDERS_NAME, TPCH_LINEITEM_NAME)); result = computeActual("SHOW TABLES FROM TPCH.DEFAULT"); tableNames = ImmutableSet.copyOf(transform(result.getMaterializedTuples(), onlyColumnGetter())); assertEquals(tableNames, ImmutableSet.of(TPCH_ORDERS_NAME, TPCH_LINEITEM_NAME)); result = computeActual("SHOW TABLES FROM UNKNOWN"); tableNames = ImmutableSet.copyOf(transform(result.getMaterializedTuples(), onlyColumnGetter())); assertEquals(tableNames, ImmutableSet.of()); } @Test public void testShowTablesLike() throws Exception { MaterializedResult result = computeActual("SHOW TABLES LIKE 'or%'"); ImmutableSet<String> tableNames = 
ImmutableSet.copyOf(transform(result.getMaterializedTuples(), onlyColumnGetter()));
        assertEquals(tableNames, ImmutableSet.of(TPCH_ORDERS_NAME));
    }

    @Test
    public void testShowColumns()
            throws Exception
    {
        MaterializedResult actual = computeActual("SHOW COLUMNS FROM orders");
        MaterializedResult expected = resultBuilder(VARIABLE_BINARY, VARIABLE_BINARY, BOOLEAN, BOOLEAN)
                .row("orderkey", "bigint", true, false)
                .row("custkey", "bigint", true, false)
                .row("orderstatus", "varchar", true, false)
                .row("totalprice", "double", true, false)
                .row("orderdate", "varchar", true, false)
                .row("orderpriority", "varchar", true, false)
                .row("clerk", "varchar", true, false)
                .row("shippriority", "bigint", true, false)
                .row("comment", "varchar", true, false)
                .build();
        assertEquals(actual, expected);
    }

    @Test
    public void testShowPartitions()
            throws Exception
    {
        MaterializedResult result = computeActual("SHOW PARTITIONS FROM orders");
        // table is not partitioned
        // TODO: add a partitioned table for tests and test where/order/limit
        assertEquals(result.getMaterializedTuples().size(), 0);
    }

    @Test
    public void testShowFunctions()
            throws Exception
    {
        MaterializedResult result = computeActual("SHOW FUNCTIONS");
        ImmutableMultimap<String, MaterializedTuple> functions = Multimaps.index(result.getMaterializedTuples(), new Function<MaterializedTuple, String>()
        {
            @Override
            public String apply(MaterializedTuple input)
            {
                assertEquals(input.getFieldCount(), 5);
                return (String) input.getField(0);
            }
        });

        assertTrue(functions.containsKey("avg"), "Expected function names " + functions + " to contain 'avg'");
        assertEquals(functions.get("avg").asList().size(), 2);
        assertEquals(functions.get("avg").asList().get(0).getField(1), "double");
        assertEquals(functions.get("avg").asList().get(0).getField(2), "bigint");
        assertEquals(functions.get("avg").asList().get(0).getField(3), "aggregate");
        assertEquals(functions.get("avg").asList().get(1).getField(1), "double");
        assertEquals(functions.get("avg").asList().get(1).getField(2), "double");
        assertEquals(functions.get("avg").asList().get(1).getField(3), "aggregate");

        assertTrue(functions.containsKey("abs"), "Expected function names " + functions + " to contain 'abs'");
        assertEquals(functions.get("abs").asList().get(0).getField(3), "scalar");

        assertTrue(functions.containsKey("rand"), "Expected function names " + functions + " to contain 'rand'");
        assertEquals(functions.get("rand").asList().get(0).getField(3), "scalar (non-deterministic)");

        assertTrue(functions.containsKey("rank"), "Expected function names " + functions + " to contain 'rank'");
        assertEquals(functions.get("rank").asList().get(0).getField(3), "window");

        assertTrue(functions.containsKey("split_part"), "Expected function names " + functions + " to contain 'split_part'");
        assertEquals(functions.get("split_part").asList().get(0).getField(1), "varchar");
        assertEquals(functions.get("split_part").asList().get(0).getField(2), "varchar, varchar, bigint");
        assertEquals(functions.get("split_part").asList().get(0).getField(3), "scalar");
    }

    @Test
    public void testNoFrom()
            throws Exception
    {
        assertQuery("SELECT 1 + 2, 3 + 4", "SELECT 1 + 2, 3 + 4 FROM orders LIMIT 1");
    }

    @Test
    public void testTopNByMultipleFields()
            throws Exception
    {
        assertQueryOrdered("SELECT orderkey, custkey, orderstatus FROM orders ORDER BY orderkey ASC, custkey ASC LIMIT 10");
        assertQueryOrdered("SELECT orderkey, custkey, orderstatus FROM orders ORDER BY orderkey ASC, custkey DESC LIMIT 10");
        assertQueryOrdered("SELECT orderkey, custkey, orderstatus FROM orders ORDER BY orderkey DESC, 
custkey ASC LIMIT 10"); assertQueryOrdered("SELECT orderkey, custkey, orderstatus FROM orders ORDER BY orderkey DESC, custkey DESC LIMIT 10"); // now try with order by fields swapped assertQueryOrdered("SELECT orderkey, custkey, orderstatus FROM orders ORDER BY custkey ASC, orderkey ASC LIMIT 10"); assertQueryOrdered("SELECT orderkey, custkey, orderstatus FROM orders ORDER BY custkey ASC, orderkey DESC LIMIT 10"); assertQueryOrdered("SELECT orderkey, custkey, orderstatus FROM orders ORDER BY custkey DESC, orderkey ASC LIMIT 10"); assertQueryOrdered("SELECT orderkey, custkey, orderstatus FROM orders ORDER BY custkey DESC, orderkey DESC LIMIT 10"); } @Test public void testUnion() throws Exception { assertQuery("SELECT orderkey FROM orders UNION SELECT custkey FROM orders"); } @Test public void testUnionDistinct() throws Exception { assertQuery("SELECT orderkey FROM orders UNION DISTINCT SELECT custkey FROM orders"); } @Test public void testUnionAll() throws Exception { assertQuery("SELECT orderkey FROM orders UNION ALL SELECT custkey FROM orders"); } @Test public void testChainedUnionsWithOrder() throws Exception { assertQueryOrdered("SELECT orderkey FROM orders UNION (SELECT custkey FROM orders UNION SELECT linenumber FROM lineitem) UNION ALL SELECT orderkey FROM lineitem ORDER BY orderkey"); } @Test public void testSubqueryUnion() throws Exception { assertQueryOrdered("SELECT * FROM (SELECT orderkey FROM orders UNION SELECT custkey FROM orders UNION SELECT orderkey FROM orders) ORDER BY orderkey LIMIT 1000"); } @Test public void testSelectOnlyUnion() throws Exception { assertQuery("SELECT 123, 'foo' UNION ALL SELECT 999, 'bar'"); } @Test public void testMultiColumnUnionAll() throws Exception { assertQuery("SELECT * FROM orders UNION ALL SELECT * FROM orders"); } @Test public void testTableQuery() throws Exception { assertQuery("TABLE orders", "SELECT * FROM orders"); } @Test public void testTableQueryOrderLimit() throws Exception { assertQuery("TABLE orders ORDER BY orderkey LIMIT 10", "SELECT * FROM orders ORDER BY orderkey LIMIT 10", true); } @Test public void testTableQueryInUnion() throws Exception { assertQuery("(SELECT * FROM orders ORDER BY orderkey LIMIT 10) UNION ALL TABLE orders", "(SELECT * FROM orders ORDER BY orderkey LIMIT 10) UNION ALL SELECT * FROM orders"); } @Test public void testTableAsSubquery() throws Exception { assertQuery("(TABLE orders) ORDER BY orderkey", "(SELECT * FROM orders) ORDER BY orderkey", true); } @Test public void testLimitPushDown() throws Exception { MaterializedResult actual = computeActual( "(TABLE orders ORDER BY orderkey) UNION ALL " + "SELECT * FROM orders WHERE orderstatus = 'F' UNION ALL " + "(TABLE orders ORDER BY orderkey LIMIT 20) UNION ALL " + "(TABLE orders LIMIT 5) UNION ALL " + "TABLE orders LIMIT 10"); MaterializedResult all = computeExpected("SELECT * FROM ORDERS", actual.getTupleInfo()); assertEquals(actual.getMaterializedTuples().size(), 10); assertTrue(all.getMaterializedTuples().containsAll(actual.getMaterializedTuples())); } @Test public void testOrderLimitCompaction() throws Exception { assertQueryOrdered("SELECT * FROM (SELECT * FROM orders ORDER BY orderkey) LIMIT 10"); } @Test public void testUnaliasSymbolReferencesWithUnion() throws Exception { assertQuery("SELECT 1, 1, 'a', 'a' UNION ALL SELECT 1, 2, 'a', 'b'"); } @Test public void testSemiJoin() throws Exception { // Throw in a bunch of IN subquery predicates assertQuery("" + "SELECT *, o2.custkey\n" + " IN (\n" + " SELECT orderkey\n" + " FROM lineitem\n" + " WHERE orderkey 
% 3 = 0)\n" + "FROM (SELECT * FROM orders WHERE custkey % 128 = 0) o1\n" + "JOIN (SELECT * FROM orders WHERE custkey % 128 = 0) o2\n" + " ON (o1.orderkey IN (SELECT orderkey FROM lineitem WHERE orderkey % 4 = 0)) = (o2.orderkey IN (SELECT orderkey FROM lineitem WHERE orderkey % 4 = 0))\n" + "WHERE o1.orderkey\n" + " IN (\n" + " SELECT orderkey\n" + " FROM lineitem\n" + " WHERE orderkey % 2 = 0)\n" + "ORDER BY o1.orderkey\n" + " IN (\n" + " SELECT orderkey\n" + " FROM lineitem\n" + " WHERE orderkey % 6 = 0)"); assertQuery("" + "SELECT orderkey\n" + " IN (\n" + " SELECT orderkey\n" + " FROM lineitem\n" + " WHERE partkey % 2 = 0),\n" + " SUM(\n" + " CASE\n" + " WHEN orderkey\n" + " IN (\n" + " SELECT orderkey\n" + " FROM lineitem\n" + " WHERE suppkey % 2 = 0)\n" + " THEN 1\n" + " ELSE 0\n" + " END)\n" + "FROM orders\n" + "GROUP BY orderkey\n" + " IN (\n" + " SELECT orderkey\n" + " FROM lineitem\n" + " WHERE partkey % 2 = 0)\n" + "HAVING SUM(\n" + " CASE\n" + " WHEN orderkey\n" + " IN (\n" + " SELECT orderkey\n" + " FROM lineitem\n" + " WHERE suppkey % 2 = 0)\n" + " THEN 1\n" + " ELSE 0\n" + " END) > 1"); } @Test public void testAntiJoin() throws Exception { assertQuery("" + "SELECT *, orderkey\n" + " NOT IN (\n" + " SELECT orderkey\n" + " FROM lineitem\n" + " WHERE orderkey % 3 = 0)\n" + "FROM orders"); } @Test public void testSemiJoinLimitPushDown() throws Exception { assertQuery("" + "SELECT COUNT(*)\n" + "FROM (\n" + " SELECT orderkey\n" + " IN (\n" + " SELECT orderkey\n" + " FROM lineitem\n" + " WHERE orderkey % 2 = 0)\n" + " FROM orders\n" + " LIMIT 10)"); } @Test public void testSemiJoinNullHandling() throws Exception { assertQuery("" + "SELECT orderkey\n" + " IN (\n" + " SELECT CASE WHEN orderkey % 3 = 0 THEN NULL ELSE orderkey END\n" + " FROM lineitem)\n" + "FROM orders"); assertQuery("" + "SELECT orderkey\n" + " IN (\n" + " SELECT orderkey\n" + " FROM lineitem)\n" + "FROM (\n" + " SELECT CASE WHEN orderkey % 3 = 0 THEN NULL ELSE orderkey END AS orderkey\n" + " FROM orders)"); assertQuery("" + "SELECT orderkey\n" + " IN (\n" + " SELECT CASE WHEN orderkey % 3 = 0 THEN NULL ELSE orderkey END\n" + " FROM lineitem)\n" + "FROM (\n" + " SELECT CASE WHEN orderkey % 4 = 0 THEN NULL ELSE orderkey END AS orderkey\n" + " FROM orders)"); } @Test public void testPredicatePushdown() throws Exception { assertQuery("" + "SELECT *\n" + "FROM (\n" + " SELECT orderkey+1 as a FROM orders WHERE orderstatus = 'F' UNION ALL \n" + " SELECT orderkey FROM orders WHERE orderkey % 2 = 0 UNION ALL \n" + " (SELECT orderkey+custkey FROM orders ORDER BY orderkey LIMIT 10)\n" + ") \n" + "WHERE a < 20 OR a > 100 \n" + "ORDER BY a"); } @Test public void testJoinPredicatePushdown() throws Exception { assertQuery("" + "SELECT COUNT(*)\n" + "FROM lineitem \n" + "JOIN (\n" + " SELECT * FROM orders\n" + ") orders \n" + "ON lineitem.orderkey = orders.orderkey \n" + "WHERE orders.orderkey % 4 = 0\n" + " AND lineitem.suppkey > orders.orderkey"); } @Test public void testLeftJoinAsInnerPredicatePushdown() throws Exception { assertQuery("" + "SELECT COUNT(*)\n" + "FROM lineitem \n" + "LEFT JOIN (\n" + " SELECT * FROM orders WHERE orders.orderkey % 2 = 0\n" + ") orders \n" + "ON lineitem.orderkey = orders.orderkey \n" + "WHERE orders.orderkey % 4 = 0\n" + " AND (lineitem.suppkey % 2 = orders.orderkey % 2 OR orders.custkey IS NULL)"); } @Test public void testPlainLeftJoinPredicatePushdown() throws Exception { assertQuery("" + "SELECT COUNT(*)\n" + "FROM lineitem \n" + "LEFT JOIN (\n" + " SELECT * FROM orders WHERE orders.orderkey % 
2 = 0\n" + ") orders \n" + "ON lineitem.orderkey = orders.orderkey \n" + "WHERE lineitem.orderkey % 4 = 0\n" + " AND (lineitem.suppkey % 2 = orders.orderkey % 2 OR orders.orderkey IS NULL)"); } @Test public void testLeftJoinPredicatePushdownWithSelfEquality() throws Exception { assertQuery("" + "SELECT COUNT(*)\n" + "FROM lineitem \n" + "LEFT JOIN (\n" + " SELECT * FROM orders WHERE orders.orderkey % 2 = 0\n" + ") orders \n" + "ON lineitem.orderkey = orders.orderkey \n" + "WHERE orders.orderkey = orders.orderkey\n" + " AND lineitem.orderkey % 4 = 0\n" + " AND (lineitem.suppkey % 2 = orders.orderkey % 2 OR orders.orderkey IS NULL)"); } @Test public void testRightJoinAsInnerPredicatePushdown() throws Exception { assertQuery("" + "SELECT COUNT(*)\n" + "FROM (\n" + " SELECT * FROM orders WHERE orders.orderkey % 2 = 0\n" + ") orders\n" + "RIGHT JOIN lineitem\n" + "ON lineitem.orderkey = orders.orderkey \n" + "WHERE orders.orderkey % 4 = 0\n" + " AND (lineitem.suppkey % 2 = orders.orderkey % 2 OR orders.custkey IS NULL)"); } @Test public void testPlainRightJoinPredicatePushdown() throws Exception { assertQuery("" + "SELECT COUNT(*)\n" + "FROM (\n" + " SELECT * FROM orders WHERE orders.orderkey % 2 = 0\n" + ") orders \n" + "RIGHT JOIN lineitem\n" + "ON lineitem.orderkey = orders.orderkey \n" + "WHERE lineitem.orderkey % 4 = 0\n" + " AND (lineitem.suppkey % 2 = orders.orderkey % 2 OR orders.orderkey IS NULL)"); } @Test public void testRightJoinPredicatePushdownWithSelfEquality() throws Exception { assertQuery("" + "SELECT COUNT(*)\n" + "FROM (\n" + " SELECT * FROM orders WHERE orders.orderkey % 2 = 0\n" + ") orders \n" + "RIGHT JOIN lineitem\n" + "ON lineitem.orderkey = orders.orderkey \n" + "WHERE orders.orderkey = orders.orderkey\n" + " AND lineitem.orderkey % 4 = 0\n" + " AND (lineitem.suppkey % 2 = orders.orderkey % 2 OR orders.orderkey IS NULL)"); } @Test public void testPredicatePushdownJoinEqualityGroups() throws Exception { assertQuery("" + "SELECT *\n" + "FROM (\n" + " SELECT custkey custkey1, custkey%4 custkey1a, custkey%8 custkey1b, custkey%16 custkey1c\n" + " FROM orders\n" + ") orders1 \n" + "JOIN (\n" + " SELECT custkey custkey2, custkey%4 custkey2a, custkey%8 custkey2b\n" + " FROM orders\n" + ") orders2 ON orders1.custkey1 = orders2.custkey2\n" + "WHERE custkey2a = custkey2b\n" + " AND custkey1 = custkey1a\n" + " AND custkey2 = custkey2a\n" + " AND custkey1a = custkey1c\n" + " AND custkey1b = custkey1c\n" + " AND custkey1b % 2 = 0"); } @Test public void testGroupByKeyPredicatePushdown() throws Exception { assertQuery("" + "SELECT *\n" + "FROM (\n" + " SELECT custkey1, orderstatus1, SUM(totalprice1) totalprice, MAX(custkey2) maxcustkey\n" + " FROM (\n" + " SELECT *\n" + " FROM (\n" + " SELECT custkey custkey1, orderstatus orderstatus1, CAST(totalprice AS BIGINT) totalprice1, orderkey orderkey1\n" + " FROM orders\n" + " ) orders1 \n" + " JOIN (\n" + " SELECT custkey custkey2, orderstatus orderstatus2, CAST(totalprice AS BIGINT) totalprice2, orderkey orderkey2\n" + " FROM orders\n" + " ) orders2 ON orders1.orderkey1 = orders2.orderkey2\n" + " ) \n" + " GROUP BY custkey1, orderstatus1\n" + ")\n" + "WHERE custkey1 = maxcustkey\n" + "AND maxcustkey % 2 = 0 \n" + "AND orderstatus1 = 'F'\n" + "AND totalprice > 10000\n" + "ORDER BY custkey1, orderstatus1, totalprice, maxcustkey"); } @Test public void testNonDeterministicJoinPredicatePushdown() throws Exception { MaterializedResult materializedResult = computeActual("" + "SELECT COUNT(*)\n" + "FROM (\n" + " SELECT DISTINCT *\n" + " FROM (\n" 
+ " SELECT 'abc' as col1a, 500 as col1b FROM lineitem limit 1\n" + " ) table1\n" + " JOIN (\n" + " SELECT 'abc' as col2a FROM lineitem limit 1000000\n" + " ) table2\n" + " ON table1.col1a = table2.col2a\n" + " WHERE rand() * 1000 > table1.col1b\n" + ")"); MaterializedTuple tuple = Iterables.getOnlyElement(materializedResult.getMaterializedTuples()); Assert.assertEquals(tuple.getFieldCount(), 1); long count = (Long) tuple.getField(0); // Technically non-deterministic unit test but has essentially a next to impossible chance of a false positive Assert.assertTrue(count > 0 && count < 1000000); } @Test public void testTrivialNonDeterministicPredicatePushdown() throws Exception { assertQuery("SELECT COUNT(*) FROM dual WHERE rand() >= 0"); } @Test public void testNonDeterministicTableScanPredicatePushdown() throws Exception { MaterializedResult materializedResult = computeActual("" + "SELECT COUNT(*)\n" + "FROM (\n" + " SELECT *\n" + " FROM lineitem\n" + " LIMIT 1000\n" + ")\n" + "WHERE rand() > 0.5"); MaterializedTuple tuple = Iterables.getOnlyElement(materializedResult.getMaterializedTuples()); Assert.assertEquals(tuple.getFieldCount(), 1); long count = (Long) tuple.getField(0); // Technically non-deterministic unit test but has essentially a next to impossible chance of a false positive Assert.assertTrue(count > 0 && count < 1000); } @Test public void testNonDeterministicAggregationPredicatePushdown() throws Exception { MaterializedResult materializedResult = computeActual("" + "SELECT COUNT(*)\n" + "FROM (\n" + " SELECT orderkey, COUNT(*)\n" + " FROM lineitem\n" + " GROUP BY orderkey\n" + " LIMIT 1000\n" + ")\n" + "WHERE rand() > 0.5"); MaterializedTuple tuple = Iterables.getOnlyElement(materializedResult.getMaterializedTuples()); Assert.assertEquals(tuple.getFieldCount(), 1); long count = (Long) tuple.getField(0); // Technically non-deterministic unit test but has essentially a next to impossible chance of a false positive Assert.assertTrue(count > 0 && count < 1000); } public void testSemiJoinPredicateMoveAround() throws Exception { assertQuery("" + "SELECT COUNT(*)\n" + "FROM (SELECT * FROM orders WHERE custkey % 2 = 0 AND orderkey % 3 = 0)\n" + "WHERE orderkey\n" + " IN (\n" + " SELECT CASE WHEN orderkey % 7 = 0 THEN NULL ELSE orderkey END\n" + " FROM lineitem\n" + " WHERE partkey % 2 = 0)\n" + " AND\n" + " orderkey % 2 = 0"); } @Test(expectedExceptions = RuntimeException.class, expectedExceptionsMessageRegExp = "\\QUnexpected parameters (bigint) for function length. 
Expected: length(varchar)\\E") public void testFunctionNotRegistered() { computeActual("SELECT length(1)"); } @Test(expectedExceptions = RuntimeException.class, expectedExceptionsMessageRegExp = "Types are not comparable with '<>': bigint vs varchar") public void testTypeMismatch() { computeActual("SELECT 1 <> 'x'"); } @BeforeClass(alwaysRun = true) public void setupDatabase() throws Exception { Logging.initialize(); handle = DBI.open("jdbc:h2:mem:test" + System.nanoTime()); RecordSet ordersRecords = readTpchRecords(TPCH_ORDERS_METADATA); handle.execute("CREATE TABLE orders (\n" + " orderkey BIGINT PRIMARY KEY,\n" + " custkey BIGINT NOT NULL,\n" + " orderstatus CHAR(1) NOT NULL,\n" + " totalprice DOUBLE NOT NULL,\n" + " orderdate CHAR(10) NOT NULL,\n" + " orderpriority CHAR(15) NOT NULL,\n" + " clerk CHAR(15) NOT NULL,\n" + " shippriority BIGINT NOT NULL,\n" + " comment VARCHAR(79) NOT NULL\n" + ")"); insertRows(TPCH_ORDERS_METADATA, handle, ordersRecords); RecordSet lineItemRecords = readTpchRecords(TPCH_LINEITEM_METADATA); handle.execute("CREATE TABLE lineitem (\n" + " orderkey BIGINT,\n" + " partkey BIGINT NOT NULL,\n" + " suppkey BIGINT NOT NULL,\n" + " linenumber BIGINT,\n" + " quantity BIGINT NOT NULL,\n" + " extendedprice DOUBLE NOT NULL,\n" + " discount DOUBLE NOT NULL,\n" + " tax DOUBLE NOT NULL,\n" + " returnflag CHAR(1) NOT NULL,\n" + " linestatus CHAR(1) NOT NULL,\n" + " shipdate CHAR(10) NOT NULL,\n" + " commitdate CHAR(10) NOT NULL,\n" + " receiptdate CHAR(10) NOT NULL,\n" + " shipinstruct VARCHAR(25) NOT NULL,\n" + " shipmode VARCHAR(10) NOT NULL,\n" + " comment VARCHAR(44) NOT NULL,\n" + " PRIMARY KEY (orderkey, linenumber)" + ")"); insertRows(TPCH_LINEITEM_METADATA, handle, lineItemRecords); setUpQueryFramework(TpchMetadata.TPCH_CATALOG_NAME, TpchMetadata.TPCH_SCHEMA_NAME); } @AfterClass(alwaysRun = true) public void cleanupDatabase() throws Exception { tearDownQueryFramework(); handle.close(); } protected abstract int getNodeCount(); protected abstract void setUpQueryFramework(String catalog, String schema) throws Exception; protected void tearDownQueryFramework() throws Exception { } protected abstract MaterializedResult computeActual(@Language("SQL") String sql); protected void assertQuery(@Language("SQL") String sql) throws Exception { assertQuery(sql, sql, false); } private void assertQueryOrdered(@Language("SQL") String sql) throws Exception { assertQuery(sql, sql, true); } protected void assertQuery(@Language("SQL") String actual, @Language("SQL") String expected) throws Exception { assertQuery(actual, expected, false); } private static final Logger log = Logger.get(AbstractTestQueries.class); private void assertQuery(@Language("SQL") String actual, @Language("SQL") String expected, boolean ensureOrdering) throws Exception { long start = System.nanoTime(); MaterializedResult actualResults = computeActual(actual); log.info("FINISHED in %s", Duration.nanosSince(start)); MaterializedResult expectedResults = computeExpected(expected, actualResults.getTupleInfo()); if (ensureOrdering) { assertEquals(actualResults.getMaterializedTuples(), expectedResults.getMaterializedTuples()); } else { assertEqualsIgnoreOrder(actualResults.getMaterializedTuples(), expectedResults.getMaterializedTuples()); } } public static void assertEqualsIgnoreOrder(Iterable<?> actual, Iterable<?> expected) { assertNotNull(actual, "actual is null"); assertNotNull(expected, "expected is null"); ImmutableMultiset<?> actualSet = ImmutableMultiset.copyOf(actual); ImmutableMultiset<?> expectedSet = 
ImmutableMultiset.copyOf(expected); if (!actualSet.equals(expectedSet)) { fail(format("not equal\nActual %s rows:\n %s\nExpected %s rows:\n %s\n", actualSet.size(), Joiner.on("\n ").join(Iterables.limit(actualSet, 100)), expectedSet.size(), Joiner.on("\n ").join(Iterables.limit(expectedSet, 100)))); } } private MaterializedResult computeExpected(@Language("SQL") final String sql, TupleInfo resultTupleInfo) { return new MaterializedResult( handle.createQuery(sql) .map(tupleMapper(resultTupleInfo)) .list(), resultTupleInfo ); } private static ResultSetMapper<Tuple> tupleMapper(final TupleInfo tupleInfo) { return new ResultSetMapper<Tuple>() { @Override public Tuple map(int index, ResultSet resultSet, StatementContext ctx) throws SQLException { List<TupleInfo.Type> types = tupleInfo.getTypes(); int count = resultSet.getMetaData().getColumnCount(); checkArgument(types.size() == count, "tuple info does not match result"); TupleInfo.Builder builder = tupleInfo.builder(); for (int i = 1; i <= count; i++) { TupleInfo.Type type = types.get(i - 1); switch (type) { case BOOLEAN: boolean booleanValue = resultSet.getBoolean(i); if (resultSet.wasNull()) { builder.appendNull(); } else { builder.append(booleanValue); } break; case FIXED_INT_64: long longValue = resultSet.getLong(i); if (resultSet.wasNull()) { builder.appendNull(); } else { builder.append(longValue); } break; case DOUBLE: double doubleValue = resultSet.getDouble(i); if (resultSet.wasNull()) { builder.appendNull(); } else { builder.append(doubleValue); } break; case VARIABLE_BINARY: String value = resultSet.getString(i); if (resultSet.wasNull()) { builder.appendNull(); } else { builder.append(Slices.wrappedBuffer(value.getBytes(UTF_8))); } break; default: throw new AssertionError("unhandled type: " + type); } } return builder.build(); } }; } private static void insertRows(TableMetadata tableMetadata, Handle handle, RecordSet data) { String vars = Joiner.on(',').join(nCopies(tableMetadata.getColumns().size(), "?")); String sql = format("INSERT INTO %s VALUES (%s)", tableMetadata.getTable().getTableName(), vars); RecordCursor cursor = data.cursor(); while (true) { // insert 1000 rows at a time PreparedBatch batch = handle.prepareBatch(sql); for (int row = 0; row < 1000; row++) { if (!cursor.advanceNextPosition()) { batch.execute(); return; } PreparedBatchPart part = batch.add(); for (int column = 0; column < tableMetadata.getColumns().size(); column++) { ColumnMetadata columnMetadata = tableMetadata.getColumns().get(column); switch (columnMetadata.getType()) { case BOOLEAN: part.bind(column, cursor.getBoolean(column)); break; case LONG: part.bind(column, cursor.getLong(column)); break; case DOUBLE: part.bind(column, cursor.getDouble(column)); break; case STRING: part.bind(column, new String(cursor.getString(column), UTF_8)); break; } } } batch.execute(); } } private Function<MaterializedTuple, String> onlyColumnGetter() { return new Function<MaterializedTuple, String>() { @Override public String apply(MaterializedTuple input) { assertEquals(input.getFieldCount(), 1); return (String) input.getField(0); } }; } private static String getExplainPlan(String query, ExplainType.Type planType) { QueryExplainer explainer = getQueryExplainer(); return explainer.getPlan((Query) SqlParser.createStatement(query), planType); } private static String getGraphvizExplainPlan(String query, ExplainType.Type planType) { QueryExplainer explainer = getQueryExplainer(); return explainer.getGraphvizPlan((Query) SqlParser.createStatement(query), planType); } private 
static QueryExplainer getQueryExplainer() { Session session = new Session("user", "test", DEFAULT_CATALOG, DEFAULT_SCHEMA, null, null); MetadataManager metadata = new MetadataManager(); metadata.addInternalSchemaMetadata(new DualMetadata()); List<PlanOptimizer> optimizers = new PlanOptimizersFactory(metadata).get(); return new QueryExplainer(session, optimizers, metadata, new MockPeriodicImportManager(), new MockStorageManager()); } }
Reduce memory usage on semi join test It was failing sporadically on the distributed test due to heap space
presto-main/src/test/java/com/facebook/presto/AbstractTestQueries.java
Reduce memory usage on semi join test
<ide><path>presto-main/src/test/java/com/facebook/presto/AbstractTestQueries.java <ide> " IN (\n" + <ide> " SELECT orderkey\n" + <ide> " FROM lineitem\n" + <del> " WHERE orderkey % 3 = 0)\n" + <del> "FROM (SELECT * FROM orders WHERE custkey % 128 = 0) o1\n" + <del> "JOIN (SELECT * FROM orders WHERE custkey % 128 = 0) o2\n" + <add> " WHERE orderkey % 5 = 0)\n" + <add> "FROM (SELECT * FROM orders WHERE custkey % 256 = 0) o1\n" + <add> "JOIN (SELECT * FROM orders WHERE custkey % 256 = 0) o2\n" + <ide> " ON (o1.orderkey IN (SELECT orderkey FROM lineitem WHERE orderkey % 4 = 0)) = (o2.orderkey IN (SELECT orderkey FROM lineitem WHERE orderkey % 4 = 0))\n" + <ide> "WHERE o1.orderkey\n" + <ide> " IN (\n" + <ide> " SELECT orderkey\n" + <ide> " FROM lineitem\n" + <del> " WHERE orderkey % 2 = 0)\n" + <add> " WHERE orderkey % 4 = 0)\n" + <ide> "ORDER BY o1.orderkey\n" + <ide> " IN (\n" + <ide> " SELECT orderkey\n" + <ide> " FROM lineitem\n" + <del> " WHERE orderkey % 6 = 0)"); <add> " WHERE orderkey % 7 = 0)"); <ide> assertQuery("" + <ide> "SELECT orderkey\n" + <ide> " IN (\n" + <ide> " SELECT orderkey\n" + <ide> " FROM lineitem\n" + <del> " WHERE partkey % 2 = 0),\n" + <add> " WHERE partkey % 4 = 0),\n" + <ide> " SUM(\n" + <ide> " CASE\n" + <ide> " WHEN orderkey\n" + <ide> " IN (\n" + <ide> " SELECT orderkey\n" + <ide> " FROM lineitem\n" + <del> " WHERE suppkey % 2 = 0)\n" + <add> " WHERE suppkey % 4 = 0)\n" + <ide> " THEN 1\n" + <ide> " ELSE 0\n" + <ide> " END)\n" + <ide> " IN (\n" + <ide> " SELECT orderkey\n" + <ide> " FROM lineitem\n" + <del> " WHERE partkey % 2 = 0)\n" + <add> " WHERE partkey % 4 = 0)\n" + <ide> "HAVING SUM(\n" + <ide> " CASE\n" + <ide> " WHEN orderkey\n" + <ide> " IN (\n" + <ide> " SELECT orderkey\n" + <ide> " FROM lineitem\n" + <del> " WHERE suppkey % 2 = 0)\n" + <add> " WHERE suppkey % 4 = 0)\n" + <ide> " THEN 1\n" + <ide> " ELSE 0\n" + <ide> " END) > 1"); <ide> Assert.assertTrue(count > 0 && count < 1000); <ide> } <ide> <add> @Test <ide> public void testSemiJoinPredicateMoveAround() <ide> throws Exception <ide> {
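The commit in this record reduces the test's memory footprint purely by tightening the modulus filters in the test SQL: each side of the orders self-join keeps roughly half as many rows, so the intermediate result no longer exhausts heap space on the distributed runner. The sketch below is illustrative only and not the verbatim test; the method name and the simplified ON clause are assumptions, while assertQuery is the suite's own helper shown earlier in this record and the filter predicates are taken from the diff.

```java
// Illustrative sketch only; not the real testSemiJoinPredicateMoveAround body.
// The point of the commit: raising the modulus from 128 to 256 keeps roughly
// half as many orders rows on each side of the self-join, shrinking the
// intermediate result that was sporadically blowing the distributed heap.
@Test
public void testSemiJoinMemoryFootprintSketch()
        throws Exception
{
    assertQuery("" +
            "SELECT COUNT(*)\n" +
            "FROM (SELECT * FROM orders WHERE custkey % 256 = 0) o1\n" +  // was % 128
            "JOIN (SELECT * FROM orders WHERE custkey % 256 = 0) o2\n" +  // was % 128
            "  ON o1.orderkey = o2.orderkey\n" +                          // simplified ON clause (assumption)
            "WHERE o1.orderkey IN (SELECT orderkey FROM lineitem WHERE orderkey % 4 = 0)");  // was % 2
}
```

Because assertQuery also runs the same SQL through the H2 side via computeExpected, tightening the filters cuts the data volume without changing what the assertion verifies.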
Java
apache-2.0
794c7751fa42b6b4a15d4174855849569b442d45
0
nikita36078/J2ME-Loader,nikita36078/J2ME-Loader,nikita36078/J2ME-Loader,nikita36078/J2ME-Loader
/* * Copyright 2012 Kulikov Dmitriy * Copyright 2017-2018 Nikita Shakarun * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package javax.microedition.lcdui; import android.content.Context; import android.view.View; import android.widget.AdapterView; import android.widget.ListView; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import javax.microedition.lcdui.event.SimpleEvent; import javax.microedition.lcdui.list.CompoundListAdapter; import javax.microedition.lcdui.list.ItemSelector; public class List extends Screen implements Choice, ItemSelector { public static final Command SELECT_COMMAND = new Command("", Command.SCREEN, 0); private ArrayList<String> strings = new ArrayList<>(); private ArrayList<Image> images = new ArrayList<>(); private final ArrayList<Boolean> selected = new ArrayList<>(); private ListView list; private CompoundListAdapter adapter; private int listType; private int selectedIndex = -1; private int fitPolicy; private Command selectCommand = SELECT_COMMAND; private SimpleEvent msgSetSelection = new SimpleEvent() { @Override public void process() { list.setSelection(selectedIndex); } }; private class ClickListener implements AdapterView.OnItemClickListener { @Override public void onItemClick(AdapterView parent, View view, int position, long id) { selectedIndex = position; switch (listType) { case IMPLICIT: fireCommandAction(selectCommand, List.this); break; case EXCLUSIVE: if (position >= 0 && position < selected.size()) { Collections.fill(selected, Boolean.FALSE); selected.set(position, Boolean.TRUE); } break; case MULTIPLE: if (position >= 0 && position < selected.size()) { selected.set(position, !selected.get(position)); } break; } adapter.notifyDataSetChanged(); } } private ClickListener clicklistener = new ClickListener(); public List(String title, int listType) { switch (listType) { case IMPLICIT: case EXCLUSIVE: case MULTIPLE: this.listType = listType; break; default: throw new IllegalArgumentException("list type " + listType + " is not supported"); } setTitle(title); } public List(String title, int listType, String[] stringElements, Image[] imageElements) { this(title, listType); if (stringElements != null && imageElements != null && imageElements.length != stringElements.length) { throw new IllegalArgumentException("string and image arrays have different length"); } if (stringElements != null) { strings.addAll(Arrays.asList(stringElements)); } if (imageElements != null) { images.addAll(Arrays.asList(imageElements)); } int size = Math.max(strings.size(), images.size()); if (size > 0) { selected.addAll(Collections.nCopies(size, Boolean.FALSE)); if (strings.size() == 0) { strings.addAll(Collections.nCopies(size, null)); } if (images.size() == 0) { images.addAll(Collections.nCopies(size, null)); } } } public void setSelectCommand(Command cmd) { if (selectCommand != SELECT_COMMAND) { removeCommand(selectCommand); } if (cmd != null) { addCommand(selectCommand = cmd); } else { selectCommand = SELECT_COMMAND; } } @Override public int append(String 
stringPart, Image imagePart) { synchronized (selected) { int index = selected.size(); boolean select = index == 0 && listType != MULTIPLE; strings.add(stringPart); images.add(imagePart); selected.add(select); if (select) { selectedIndex = index; } if (list != null) { adapter.append(stringPart, imagePart); } return index; } } @Override public void delete(int elementNum) { synchronized (selected) { strings.remove(elementNum); images.remove(elementNum); selected.remove(elementNum); if (selected.size() == 0) { selectedIndex = -1; } if (list != null) { adapter.delete(elementNum); } } } @Override public void deleteAll() { synchronized (selected) { strings.clear(); images.clear(); selected.clear(); selectedIndex = -1; if (list != null) { adapter.deleteAll(); } } } @Override public Image getImage(int elementNum) { return images.get(elementNum); } @Override public int getSelectedFlags(boolean[] selectedArray) { synchronized (selected) { if (selectedArray.length < selected.size()) { throw new IllegalArgumentException("return array is too short"); } int index = 0; int selectedCount = 0; for (Boolean flag : selected) { if (flag) { selectedCount++; } selectedArray[index++] = flag; } while (index < selectedArray.length) { selectedArray[index++] = false; } return selectedCount; } } @Override public int getSelectedIndex() { return selectedIndex; } @Override public String getString(int elementNum) { return strings.get(elementNum); } @Override public void insert(int elementNum, String stringPart, Image imagePart) { synchronized (selected) { boolean select = selected.size() == 0 && listType != MULTIPLE; strings.add(elementNum, stringPart); images.add(elementNum, imagePart); selected.add(elementNum, select); if (select) { selectedIndex = elementNum; } if (list != null) { adapter.insert(elementNum, stringPart, imagePart); } } } @Override public boolean isSelected(int elementNum) { synchronized (selected) { return selected.get(elementNum); } } @Override public void set(int elementNum, String stringPart, Image imagePart) { synchronized (selected) { strings.set(elementNum, stringPart); images.set(elementNum, imagePart); if (list != null) { adapter.set(elementNum, stringPart, imagePart); } } } @Override public void setSelectedFlags(boolean[] selectedArray) { if (listType == EXCLUSIVE || listType == IMPLICIT) { for (int i = 0; i < selectedArray.length; i++) { if (selectedArray[i]) { setSelectedIndex(i, true); return; } } } synchronized (selected) { if (selectedArray.length < selected.size()) { throw new IllegalArgumentException("array is too short"); } int size = selected.size(); for (int i = 0; i < size; i++) { selected.set(i, selectedArray[i]); } } } @Override public void setSelectedIndex(int elementNum, boolean flag) { synchronized (selected) { selected.set(elementNum, flag); if (flag) { selectedIndex = elementNum; } if (list != null) { if (flag) { ViewHandler.postEvent(msgSetSelection); } } } } public void setFont(int elementNum, Font font) { } public Font getFont(int elementNum) { return Font.getDefaultFont(); } public void setFitPolicy(int fitPolicy) { this.fitPolicy = fitPolicy; } public int getFitPolicy() { return fitPolicy; } @Override public int size() { synchronized (selected) { return selected.size(); } } @Override public View getScreenView() { Context context = getParentActivity(); adapter = new CompoundListAdapter(context, this, listType); list = new ListView(context); list.setAdapter(adapter); int size = selected.size(); for (int i = 0; i < size; i++) { adapter.append(strings.get(i), images.get(i)); 
} if (listType == IMPLICIT && selectedIndex >= 0 && selectedIndex < selected.size()) { list.setSelection(selectedIndex); } list.setOnItemClickListener(clicklistener); return list; } @Override public void clearScreenView() { list = null; adapter = null; } }
app/src/main/java/javax/microedition/lcdui/List.java
/* * Copyright 2012 Kulikov Dmitriy * Copyright 2017-2018 Nikita Shakarun * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package javax.microedition.lcdui; import android.content.Context; import android.view.View; import android.widget.AdapterView; import android.widget.ListView; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import javax.microedition.lcdui.event.SimpleEvent; import javax.microedition.lcdui.list.CompoundListAdapter; import javax.microedition.lcdui.list.ItemSelector; public class List extends Screen implements Choice, ItemSelector { public static final Command SELECT_COMMAND = new Command("", Command.SCREEN, 0); private ArrayList<String> strings = new ArrayList<>(); private ArrayList<Image> images = new ArrayList<>(); private final ArrayList<Boolean> selected = new ArrayList<>(); private ListView list; private CompoundListAdapter adapter; private int listType; private int selectedIndex = -1; private int fitPolicy; private Command selectCommand = SELECT_COMMAND; private SimpleEvent msgSetSelection = new SimpleEvent() { @Override public void process() { list.setSelection(selectedIndex); } }; private class ClickListener implements AdapterView.OnItemClickListener { @Override public void onItemClick(AdapterView parent, View view, int position, long id) { selectedIndex = position; switch (listType) { case IMPLICIT: fireCommandAction(selectCommand, List.this); break; case EXCLUSIVE: if (selectedIndex >= 0 && selectedIndex < selected.size()) { selected.set(selectedIndex, Boolean.FALSE); } if (position >= 0 && position < selected.size()) { selected.set(position, Boolean.TRUE); } break; case MULTIPLE: if (position >= 0 && position < selected.size()) { selected.set(position, !selected.get(position)); } break; } adapter.notifyDataSetChanged(); } } private ClickListener clicklistener = new ClickListener(); public List(String title, int listType) { switch (listType) { case IMPLICIT: case EXCLUSIVE: case MULTIPLE: this.listType = listType; break; default: throw new IllegalArgumentException("list type " + listType + " is not supported"); } setTitle(title); } public List(String title, int listType, String[] stringElements, Image[] imageElements) { this(title, listType); if (stringElements != null && imageElements != null && imageElements.length != stringElements.length) { throw new IllegalArgumentException("string and image arrays have different length"); } if (stringElements != null) { strings.addAll(Arrays.asList(stringElements)); } if (imageElements != null) { images.addAll(Arrays.asList(imageElements)); } int size = Math.max(strings.size(), images.size()); if (size > 0) { selected.addAll(Collections.nCopies(size, Boolean.FALSE)); if (strings.size() == 0) { strings.addAll(Collections.nCopies(size, null)); } if (images.size() == 0) { images.addAll(Collections.nCopies(size, null)); } } } public void setSelectCommand(Command cmd) { if (selectCommand != SELECT_COMMAND) { removeCommand(selectCommand); } if (cmd != null) { addCommand(selectCommand = cmd); } else { 
selectCommand = SELECT_COMMAND; } } @Override public int append(String stringPart, Image imagePart) { synchronized (selected) { int index = selected.size(); boolean select = index == 0 && listType != MULTIPLE; strings.add(stringPart); images.add(imagePart); selected.add(select); if (select) { selectedIndex = index; } if (list != null) { adapter.append(stringPart, imagePart); } return index; } } @Override public void delete(int elementNum) { synchronized (selected) { strings.remove(elementNum); images.remove(elementNum); selected.remove(elementNum); if (selected.size() == 0) { selectedIndex = -1; } if (list != null) { adapter.delete(elementNum); } } } @Override public void deleteAll() { synchronized (selected) { strings.clear(); images.clear(); selected.clear(); selectedIndex = -1; if (list != null) { adapter.deleteAll(); } } } @Override public Image getImage(int elementNum) { return images.get(elementNum); } @Override public int getSelectedFlags(boolean[] selectedArray) { synchronized (selected) { if (selectedArray.length < selected.size()) { throw new IllegalArgumentException("return array is too short"); } int index = 0; int selectedCount = 0; for (Boolean flag : selected) { if (flag) { selectedCount++; } selectedArray[index++] = flag; } while (index < selectedArray.length) { selectedArray[index++] = false; } return selectedCount; } } @Override public int getSelectedIndex() { return selectedIndex; } @Override public String getString(int elementNum) { return strings.get(elementNum); } @Override public void insert(int elementNum, String stringPart, Image imagePart) { synchronized (selected) { boolean select = selected.size() == 0 && listType != MULTIPLE; strings.add(elementNum, stringPart); images.add(elementNum, imagePart); selected.add(elementNum, select); if (select) { selectedIndex = elementNum; } if (list != null) { adapter.insert(elementNum, stringPart, imagePart); } } } @Override public boolean isSelected(int elementNum) { synchronized (selected) { return selected.get(elementNum); } } @Override public void set(int elementNum, String stringPart, Image imagePart) { synchronized (selected) { strings.set(elementNum, stringPart); images.set(elementNum, imagePart); if (list != null) { adapter.set(elementNum, stringPart, imagePart); } } } @Override public void setSelectedFlags(boolean[] selectedArray) { if (listType == EXCLUSIVE || listType == IMPLICIT) { for (int i = 0; i < selectedArray.length; i++) { if (selectedArray[i]) { setSelectedIndex(i, true); return; } } } synchronized (selected) { if (selectedArray.length < selected.size()) { throw new IllegalArgumentException("array is too short"); } int size = selected.size(); for (int i = 0; i < size; i++) { selected.set(i, selectedArray[i]); } } } @Override public void setSelectedIndex(int elementNum, boolean flag) { synchronized (selected) { selected.set(elementNum, flag); if (flag) { selectedIndex = elementNum; } if (list != null) { if (flag) { ViewHandler.postEvent(msgSetSelection); } } } } public void setFont(int elementNum, Font font) { } public Font getFont(int elementNum) { return Font.getDefaultFont(); } public void setFitPolicy(int fitPolicy) { this.fitPolicy = fitPolicy; } public int getFitPolicy() { return fitPolicy; } @Override public int size() { synchronized (selected) { return selected.size(); } } @Override public View getScreenView() { Context context = getParentActivity(); adapter = new CompoundListAdapter(context, this, listType); list = new ListView(context); list.setAdapter(adapter); int size = selected.size(); for (int 
i = 0; i < size; i++) { adapter.append(strings.get(i), images.get(i)); } if (listType == IMPLICIT && selectedIndex >= 0 && selectedIndex < selected.size()) { list.setSelection(selectedIndex); } list.setOnItemClickListener(clicklistener); return list; } @Override public void clearScreenView() { list = null; adapter = null; } }
Fix incorrect list selection
app/src/main/java/javax/microedition/lcdui/List.java
Fix incorrect list selection
<ide><path>app/src/main/java/javax/microedition/lcdui/List.java <ide> fireCommandAction(selectCommand, List.this); <ide> break; <ide> case EXCLUSIVE: <del> if (selectedIndex >= 0 && selectedIndex < selected.size()) { <del> selected.set(selectedIndex, Boolean.FALSE); <del> } <ide> if (position >= 0 && position < selected.size()) { <add> Collections.fill(selected, Boolean.FALSE); <ide> selected.set(position, Boolean.TRUE); <ide> } <ide> break;
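The diff above is small but worth spelling out: onItemClick assigns selectedIndex = position before the switch, so the old EXCLUSIVE branch cleared the flag of the row that was just tapped rather than the previously selected one, leaving two rows marked. The fix resets every flag with Collections.fill before marking the tapped position. The snippet below is a minimal, self-contained sketch of that logic; the class name, the fixed list size, and the main method are made up for illustration, and only the EXCLUSIVE branch body mirrors the actual change.

```java
import java.util.ArrayList;
import java.util.Collections;

// Minimal sketch of the EXCLUSIVE-choice fix from the diff above.
// The old code cleared only the just-updated selectedIndex, so the previously
// selected row kept its flag; the fix clears every flag first.
public class ExclusiveSelectionSketch {
    private final ArrayList<Boolean> selected =
            new ArrayList<>(Collections.nCopies(5, Boolean.FALSE));

    void onItemClick(int position) {
        if (position >= 0 && position < selected.size()) {
            Collections.fill(selected, Boolean.FALSE);  // reset all flags
            selected.set(position, Boolean.TRUE);       // select only the clicked row
        }
    }

    public static void main(String[] args) {
        ExclusiveSelectionSketch sketch = new ExclusiveSelectionSketch();
        sketch.onItemClick(2);
        sketch.onItemClick(4);                // only index 4 stays selected
        System.out.println(sketch.selected);  // [false, false, false, false, true]
    }
}
```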
JavaScript
mit
f3040a10da456ac6f6f1bef5db79af86629c9699
0
yishn/Sabaki,yishn/Goban,yishn/Goban,yishn/Sabaki
var remote = require('remote') var fs = require('fs') var shell = require('shell') var sgf = require('../module/sgf.js') var uuid = require('../lib/node-uuid') var process = remote.require('process') var app = remote.require('app'); var dialog = remote.require('dialog') var setting = remote.require('./module/setting.js') var Menu = remote.require('menu') var Tuple = require('../lib/tuple') var Board = require('../module/board.js') var Scrollbar = require('../lib/gemini-scrollbar') /** * Getters and setters */ function setIsLoading(loading) { if (loading) document.body.addClass('loading') else $('loading').tween('opacity', 0).get('tween').addEvent('complete', function() { document.body.removeClass('loading') $('loading').setStyle('opacity', null) }) } function getShowVariations() { return $('goban').hasClass('variations') } function setShowVariations(show) { if (show) $('goban').addClass('variations') else $('goban').removeClass('variations') setting.set('view.show_variations', show) } function getFuzzyStonePlacement() { return $('goban').hasClass('fuzzy') } function setFuzzyStonePlacement(fuzzy) { if (fuzzy) $('goban').addClass('fuzzy') else $('goban').removeClass('fuzzy') setting.set('view.fuzzy_stone_placement', fuzzy) } function getShowCoordinates() { return $('goban').hasClass('coordinates') } function setShowCoordinates(show) { if (show) $('goban').addClass('coordinates') else $('goban').removeClass('coordinates') setting.set('view.show_coordinates', show) } function getShowSidebar() { return document.body.hasClass('sidebar') } function setShowSidebar(show) { if (show) document.body.addClass('sidebar') else document.body.removeClass('sidebar') $('sidebar').setStyle('width', setting.get('view.sidebar_width')) $('main').setStyle('right', show ? setting.get('view.sidebar_width') : 0) setting.set('view.show_sidebar', show) if (show) { updateGraph() updateSlider() } else { // Clear game graph var s = $('graph').retrieve('sigma') if (s) { s.graph.clear() s.refresh() } } // Resize window var win = remote.getCurrentWindow() var size = win.getContentSize() if (win.isMaximized()) return win.setContentSize(size[0] + (show ? 1 : -1) * setting.get('view.sidebar_width').toInt(), size[1]) } function getSidebarWidth() { return $('sidebar').getStyle('width').toInt() } function setSidebarWidth(width) { $('sidebar').setStyle('width', width) $$('.sidebar #main').setStyle('right', width) } function getPlayerName(sign) { return $$('#player_' + sign + ' .name')[0].get('text') } function setPlayerName(sign, name) { if (name.trim() == '') name = sign > 0 ? 'Black' : 'White' $$('#player_' + sign + ' .name')[0].set('text', name) } function getCaptures() { return { '-1': $$('#player_-1 .captures')[0].get('text').toInt(), '1': $$('#player_1 .captures')[0].get('text').toInt() } } function setCaptures(captures) { $$('#player_-1 .captures')[0].set('text', captures['-1']) .setStyle('opacity', captures['-1'] == 0 ? 0 : .7) $$('#player_1 .captures')[0].set('text', captures['1']) .setStyle('opacity', captures['1'] == 0 ? 0 : .7) } function getCurrentPlayer() { return $$('.currentplayer')[0].get('src') == '../img/ui/blacktoplay.png' ? 1 : -1 } function setCurrentPlayer(sign) { $$('.currentplayer').set('src', sign > 0 ? 
'../img/ui/blacktoplay.png' : '../img/ui/whitetoplay.png') } function getSliderValue() { return $$('#sidebar .slider div')[0].getStyle('height').toInt() } function setSliderValue(value) { $$('#sidebar .slider div')[0].setStyle('height', value + '%') } function getRootTree() { if (!getCurrentTreePosition()) return null return getCurrentTreePosition().unpack(function(tree, index) { while (tree.parent != null) tree = tree.parent return tree }) } function setRootTree(tree) { if (tree.nodes.length == 0) return tree.parent = null setCurrentTreePosition(sgf.addBoard(tree), 0) // Update UI if (getShowSidebar()) { updateGraph() updateSlider() } if ('PB' in tree.nodes[0]) setPlayerName(1, tree.nodes[0].PB[0]) if ('PW' in tree.nodes[0]) setPlayerName(-1, tree.nodes[0].PW[0]) } function getGraphMatrixDict() { return $('graph').retrieve('graphmatrixdict') } function setGraphMatrixDict(matrixdict) { if (!getShowSidebar()) return var s = $('graph').retrieve('sigma') $('graph').store('graphmatrixdict', matrixdict) s.graph.clear() s.graph.read(sgf.matrix2graph(matrixdict)) s.refresh() } function setCurrentTreePosition(tree, index) { if (!tree || getScoringMode()) return // Remove current graph node color var n = getCurrentGraphNode() if (n && n != getGraphNode(tree, index)) delete n.color $('goban').store('position', new Tuple(tree, index)) // Set current path var t = tree while (t.parent) { t.parent.current = t.parent.subtrees.indexOf(t) t = t.parent } // Update graph and slider var n = getCurrentGraphNode() if (n) { setTimeout(function() { if (getCurrentGraphNode() != n) return centerGraphCameraAt(n) updateSlider() }, 300) } setBoard(sgf.addBoard(tree, index).nodes[index].board) // Determine current player setCurrentPlayer(1) if ('B' in tree.nodes[index]) setCurrentPlayer(-1) else if ('W' in tree.nodes[index]) setCurrentPlayer(1) else if ('PL' in tree.nodes[index]) setCurrentPlayer(tree.nodes[index].PL[0] == 'W' ? -1 : 1) else if ('HA' in tree.nodes[index] && tree.nodes[index].HA[0].toInt() >= 1) setCurrentPlayer(-1) } function getCurrentTreePosition() { return $('goban').retrieve('position') } function getCurrentGraphNode() { if (!getCurrentTreePosition()) return null return getCurrentTreePosition().unpack(getGraphNode) } function getGraphNode(tree, index) { var s = $('graph').retrieve('sigma') return s.graph.nodes(tree.id + '-' + index) } function getSelectedTool() { var li = $$('#edit .selected')[0] var tool = li.get('class').replace('selected', '').replace('-tool', '').trim() if (tool == 'stone') { return li.getElement('img').get('src').contains('_1') ? 
'stone_1' : 'stone_-1' } else { return tool } } function getBoard() { return $('goban').retrieve('board') } function setBoard(board) { if (!getBoard() || getBoard().size != board.size) { $('goban').store('board', board) buildBoard() } $('goban').store('board', board) setCaptures(board.captures) for (var x = 0; x < board.size; x++) { for (var y = 0; y < board.size; y++) { var li = $('goban').getElement('.pos_' + x + '-' + y) var sign = board.arrangement[li.retrieve('tuple')] var types = ['ghost_1', 'ghost_-1', 'circle', 'triangle', 'cross', 'square', 'label', 'point'] types.each(function(x) { if (li.hasClass(x)) li.removeClass(x) }) if (li.retrieve('tuple') in board.overlays) { board.overlays[li.retrieve('tuple')].unpack(function(type, ghost, label) { if (type != '') li.addClass(type) if (ghost != 0) li.addClass('ghost_' + ghost) if (label != '') li.set('data-label', label) }) } if (li.hasClass('sign_' + sign)) continue for (var i = -1; i <= 1; i++) { if (li.hasClass('sign_' + i)) li.removeClass('sign_' + i) } li.addClass('sign_' + sign) .getElement('img').set('src', '../img/goban/stone_' + sign + '.png') } } } function getEditMode() { return $('bar').hasClass('edit') } function setEditMode(editMode) { if (editMode) { $('bar').addClass('edit') closeScore() closeGameInfo() } else { $('bar').removeClass('edit') } } function getScoringMode() { return $$('body')[0].hasClass('scoring') } function setScoringMode(scoringMode) { if (scoringMode) { $$('body').addClass('scoring') setEditMode(false) closeGameInfo() var deadstones = getBoard().guessDeadStones() deadstones.each(function(v) { $$('#goban .pos_' + v[0] + '-' + v[1]).addClass('dead') }) updateAreaMap() } else { $$('body').removeClass('scoring') $$('.dead').removeClass('dead') } } function getScoringMethod() { return $$('#score .method .territory')[0].hasClass('current') ? 'territory' : 'area' } function setScoringMethod(method) { $$('#score .method li').removeClass('current') $$('#score .method .' + method).addClass('current') $$('#score tr > *').addClass('disabled') $$('#score table .' + method).removeClass('disabled') setting.set('scoring.method', method) // Update UI for (var sign = -1; sign <= 1; sign += 2) { var tr = $$('#score tbody tr' + (sign < 0 ? ':last-child' : ''))[0] var tds = tr.getElements('td') tds[4].set('text', 0) for (var i = 0; i <= 3; i++) { if (tds[i].hasClass('disabled') || isNaN(tds[i].get('text').toFloat())) continue tds[4].set('text', tds[4].get('text').toFloat() + tds[i].get('text').toFloat()) } } } /** * Methods */ function loadSettings() { if (setting.get('view.fuzzy_stone_placement')) $('goban').addClass('fuzzy') if (setting.get('view.show_coordinates')) $('goban').addClass('coordinates') if (setting.get('view.show_variations')) $('goban').addClass('variations') if (setting.get('view.show_sidebar')) { document.body.addClass('sidebar') setSidebarWidth(setting.get('view.sidebar_width')) } } function prepareGameGraph() { var container = $('graph') var s = new sigma(container) s.settings({ defaultNodeColor: '#eee', defaultEdgeColor: '#eee', defaultNodeBorderColor: 'rgba(255,255,255,.2)', edgeColor: 'default', borderSize: 2, zoomMax: 1, zoomMin: 1, autoResize: false, autoRescale: false }) s.bind('clickNode', function(e) { e.data.node.data.unpack(function(tree, index) { setCurrentTreePosition(tree, index) }) }).bind('rightClickNode', function(e) { e.data.node.data.unpack(function(tree, index) { openNodeMenu(tree, index) }) }) container.store('sigma', s) } function selectTool(tool) { $$('#edit .' 
+ tool + '-tool a').fireEvent('click') } function makeMove(vertex) { if (getBoard().hasVertex(vertex) && getBoard().arrangement[vertex] != 0) return var position = getCurrentTreePosition() var tree = position[0], index = position[1] var color = getCurrentPlayer() > 0 ? 'B' : 'W' var sign = color == 'B' ? 1 : -1 // Check for ko var ko = sgf.navigate(tree, index, -1).unpack(function(prevTree, prevIndex) { if (!prevTree) return var hash = getBoard().makeMove(sign, vertex).getHash() return prevTree.nodes[prevIndex].board.getHash() == hash }) if (ko) { var button = dialog.showMessageBox(remote.getCurrentWindow(), { type: 'info', title: 'Goban', buttons: ['Play Anyway', 'Don’t Play', 'Cancel'], message: 'You are about to play a move which repeats a previous board position. ' + 'This is invalid in some rulesets.' }) if (button != 0) return } // Play sounds if (getBoard().hasVertex(vertex)) { // Detect captured stones if (getBoard().getNeighborhood(vertex).some(function(v) { return getBoard().arrangement[v] == -sign && getBoard().getLiberties(v).length == 1 })) setTimeout(function() { new Audio('../sound/capture' + Math.floor(Math.random() * 5) + '.wav').play() }, 400 + Math.floor(Math.random() * 200)) new Audio('../sound/' + Math.floor(Math.random() * 5) + '.wav').play() } else new Audio('../sound/pass.wav').play() // Randomize shift and readjust var li = $$('#goban .pos_' + vertex[0] + '-' + vertex[1]) var direction = Math.floor(Math.random() * 9) for (var i = 0; i < 9; i++) li.removeClass('shift_' + i) li.addClass('shift_' + direction) if (direction == 1 || direction == 5 || direction == 8) { // Left $$('#goban .pos_' + (vertex[0] - 1) + '-' + vertex[1]) .removeClass('shift_3').removeClass('shift_7').removeClass('shift_6') } else if (direction == 2 || direction == 5 || direction == 6) { // Top $$('#goban .pos_' + vertex[0] + '-' + (vertex[1] - 1)) .removeClass('shift_4').removeClass('shift_7').removeClass('shift_8') } else if (direction == 3 || direction == 7 || direction == 6) { // Right $$('#goban .pos_' + (vertex[0] + 1) + '-' + vertex[1]) .removeClass('shift_1').removeClass('shift_5').removeClass('shift_8') } else if (direction == 4 || direction == 7 || direction == 8) { // Bottom $$('#goban .pos_' + vertex[0] + '-' + (vertex[1] + 1)) .removeClass('shift_2').removeClass('shift_5').removeClass('shift_6') } if (tree.current == null && tree.nodes.length - 1 == index) { // Append move var node = {} node[color] = [sgf.vertex2point(vertex)] tree.nodes.push(node) setCurrentTreePosition(tree, tree.nodes.length - 1) } else { if (index != tree.nodes.length - 1) { // Search for next move var nextNode = tree.nodes[index + 1] var moveExists = color in nextNode && sgf.point2vertex(nextNode[color][0]).equals(vertex) if (moveExists) { setCurrentTreePosition(tree, index + 1) return } } else { // Search for variation var variations = tree.subtrees.filter(function(subtree) { return subtree.nodes.length > 0 && color in subtree.nodes[0] && sgf.point2vertex(subtree.nodes[0][color][0]).equals(vertex) }) if (variations.length > 0) { setCurrentTreePosition(sgf.addBoard(variations[0]), 0) return } } // Create variation var splitted = sgf.splitTree(tree, index) var node = {}; node[color] = [sgf.vertex2point(vertex)] var newtree = { id: uuid.v4(), nodes: [node], subtrees: [], parent: splitted, current: null } splitted.subtrees.push(newtree) splitted.current = splitted.subtrees.length - 1 sgf.addBoard(newtree, newtree.nodes.length - 1) setCurrentTreePosition(newtree, 0) } updateGraph() updateSlider() } function 
updateGraph() { if (!getShowSidebar() || !getRootTree()) return setGraphMatrixDict(sgf.tree2matrixdict(getRootTree())) centerGraphCameraAt(getCurrentGraphNode()) } function updateSlider() { if (!getShowSidebar()) return getCurrentTreePosition().unpack(function(tree, index) { var total = sgf.getCurrentHeight(getRootTree()) - 1 var relative = total + 1 - sgf.getCurrentHeight(tree) + index setSliderValue(total == 0 ? 0 : relative * 100 / total) }) } function vertexClicked() { closeGameInfo() if (!getEditMode() && !getScoringMode()) { if (event.button != 0) return makeMove(this) return } // Scoring mode activated if (getScoringMode()) { if (getBoard().arrangement[this] == 0) return getBoard().getRelatedChains(this).each(function(vertex) { $$('#goban .pos_' + vertex[0] + '-' + vertex[1]).toggleClass('dead') }) updateAreaMap() return } // Edit mode activated getCurrentTreePosition().unpack(function(tree, index) { var node = tree.nodes[index] var tool = getSelectedTool() var board = getBoard() var dictionary = { 'cross': 'MA', 'triangle': 'TR', 'circle': 'CR', 'square': 'SQ', 'number': 'LB', 'label': 'LB' } if (tool.contains('stone')) { if ('B' in node || 'W' in node) { // New variation needed var splitted = sgf.splitTree(tree, index) if (splitted != tree || splitted.subtrees.length != 0) { tree = { nodes: [], subtrees: [], current: null, parent: splitted } splitted.subtrees.push(tree) } node = { PL: getCurrentPlayer() > 0 ? ['B'] : ['W'] } index = tree.nodes.length tree.nodes.push(node) } var sign = tool.contains('_1') ? 1 : -1 if (event.button == 2) sign = -sign var oldSign = board.arrangement[this] var ids = ['AW', 'AE', 'AB'] var id = ids[sign + 1] var point = sgf.vertex2point(this) for (var i = -1; i <= 1; i++) { if (!(ids[i + 1] in node)) continue k = node[ids[i + 1]].indexOf(point) if (k >= 0) { node[ids[i + 1]].splice(k, 1) if (node[ids[i + 1]].length == 0) { delete node[ids[i + 1]] } } } if (oldSign != sign) { if (id in node) node[id].push(point) else node[id] = [point] } else if (oldSign == sign) { if ('AE' in node) node.AE.push(point) else node.AE = [point] } } else { if (event.button != 0) return if (tool != 'label' && tool != 'number') { if (this in board.overlays && board.overlays[this][0] == tool) { delete board.overlays[this] } else { board.overlays[this] = new Tuple(tool, 0, '') } } else if (tool == 'number') { if (this in board.overlays && board.overlays[this][0] == 'label') { delete board.overlays[this] } else { var number = 1 if ('LB' in node) { node.LB.each(function(value) { var label = value.substr(3).toInt() if (!isNaN(label)) number = Math.max(number, label + 1) }) } board.overlays[this] = new Tuple(tool, 0, number.toString()) } } else if (tool == 'label') { if (this in board.overlays && board.overlays[this][0] == 'label') { delete board.overlays[this] } else { var alpha = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' var k = 0 if ('LB' in node) { node.LB.each(function(value) { if (value.length != 4 || !alpha.contains(value[3])) return var label = value[3] k = Math.max(k, (alpha.indexOf(label) + 1) % alpha.length) }) } board.overlays[this] = new Tuple(tool, 0, alpha[k]) } } Object.each(dictionary, function(id) { delete node[id] }) $$('#goban .row li').each(function(li) { var vertex = li.retrieve('tuple') if (!(vertex in board.overlays)) return var id = dictionary[board.overlays[vertex][0]] var pt = sgf.vertex2point(vertex) if (id == 'LB') pt += ':' + board.overlays[vertex][2] if (id in node) node[id].push(pt) else node[id] = [pt] }) } setCurrentTreePosition(tree, index) }.bind(this)) } 
function buildBoard() { var board = getBoard() var rows = [] var hoshi = board.getHandicapPlacement(9) for (var y = 0; y < board.size; y++) { var ol = new Element('ol.row') for (var x = 0; x < board.size; x++) { var vertex = new Tuple(x, y) var li = new Element('li.pos_' + x + '-' + y) .store('tuple', vertex) .addClass('shift_' + Math.floor(Math.random() * 9)) var img = new Element('img', { src: '../img/goban/stone_0.png' }) if (hoshi.some(function(v) { return v.equals(vertex) })) li.addClass('hoshi') ol.adopt(li.adopt(img) .addEvent('mouseup', function() { if (!$('goban').retrieve('mousedown')) return $('goban').store('mousedown', false) vertexClicked.call(this) }.bind(vertex)) .addEvent('mousedown', function() { $('goban').store('mousedown', true) }) ) } rows.push(ol) } var alpha = 'ABCDEFGHJKLMNOPQRSTUVWXYZ' var coordx = new Element('ol.coordx') var coordy = new Element('ol.coordy') for (var i = 0; i < board.size; i++) { coordx.adopt(new Element('li', { text: alpha[i] })) coordy.adopt(new Element('li', { text: board.size - i })) } var goban = $$('#goban div')[0] goban.empty().adopt(rows, coordx, coordy) goban.grab(coordx.clone(), 'top').grab(coordy.clone(), 'top') resizeBoard() } function resizeBoard() { var board = getBoard() if (!board) return var width = $('goban').getStyle('width').toInt() var height = $('goban').getStyle('height').toInt() var min = Math.min(width, height) var hasCoordinates = getShowCoordinates() var fieldsize = Math.floor(min / board.size) min = fieldsize * board.size if (hasCoordinates) { fieldsize = Math.floor(min / (board.size + 2)) min = fieldsize * (board.size + 2) } $$('#goban > div').setStyle('width', min).setStyle('height', min) $$('#goban .row, #goban .coordx').setStyle('height', fieldsize).setStyle('line-height', fieldsize) $$('#goban .row, #goban .coordx').setStyle('margin-left', hasCoordinates ? fieldsize : 0) $$('#goban .coordy').setStyle('width', fieldsize).setStyle('top', fieldsize).setStyle('line-height', fieldsize) $$('#goban .coordy:last-child').setStyle('left', fieldsize * (board.size + 1)) $$('#goban li').setStyle('width', fieldsize).setStyle('height', fieldsize) } function showGameInfo() { closeScore() var tree = getRootTree() var rootNode = tree.nodes[0] var info = $('info') info.addClass('show').getElement('input[name="name_1"]').focus() info.getElement('input[name="name_1"]').set('value', getPlayerName(1)) info.getElement('input[name="name_-1"]').set('value', getPlayerName(-1)) info.getElement('input[name="rank_1"]').set('value', 'BR' in rootNode ? rootNode.BR[0] : '') info.getElement('input[name="rank_-1"]').set('value', 'WR' in rootNode ? rootNode.WR[0] : '') info.getElement('input[name="result"]').set('value', 'RE' in rootNode ? rootNode.RE[0] : '') info.getElement('input[name="komi"]').set('value', 'KM' in rootNode ? rootNode.KM[0].toFloat() : '') var size = info.getElement('input[name="size"]') size.set('value', 'SZ' in rootNode ? 
rootNode.SZ[0] : '') var handicap = info.getElement('select[name="handicap"]') if ('HA' in rootNode) handicap.selectedIndex = rootNode.HA[0].toInt() - 1 else handicap.selectedIndex = 0 var disabled = tree.nodes.length > 1 || tree.subtrees.length > 0 handicap.disabled = disabled size.disabled = disabled } function closeGameInfo() { $('info').removeClass('show') } function updateGameInfo() { var rootNode = getRootTree().nodes[0] var info = $('info') rootNode.BR = [info.getElement('input[name="rank_1"]').get('value').trim()] rootNode.WR = [info.getElement('input[name="rank_-1"]').get('value').trim()] rootNode.PB = [info.getElement('input[name="name_1"]').get('value').trim()] rootNode.PW = [info.getElement('input[name="name_-1"]').get('value').trim()] setPlayerName(1, rootNode.PB[0]) setPlayerName(-1, rootNode.PW[0]) var result = info.getElement('input[name="result"]').get('value').trim() rootNode.RE = [result] if (result == '') delete rootNode.RE var komi = info.getElement('input[name="komi"]').get('value').toFloat() rootNode.KM = [String.from(komi)] if (isNaN(komi)) rootNode.KM = ['0'] var handicap = info.getElement('select[name="handicap"]').selectedIndex if (handicap == 0) delete rootNode.HA else rootNode.HA = [String.from(handicap + 1)] var size = info.getElement('input[name="size"]').get('value').toInt() rootNode.SZ = [String.from(Math.max(Math.min(size, 26), 9))] if (isNaN(size)) rootNode.SZ = ['19'] if (!info.getElement('select[name="handicap"]').disabled) { setCurrentTreePosition(getRootTree(), 0) if (!('HA' in rootNode)) { delete rootNode.AB } else { var board = getBoard() var stones = board.getHandicapPlacement(rootNode.HA[0].toInt()) rootNode.AB = [] for (var i = 0; i < stones.length; i++) { rootNode.AB.push(sgf.vertex2point(stones[i])) } } setCurrentTreePosition(getRootTree(), 0) } } function showScore() { var board = $('goban').retrieve('finalboard') var score = board.getScore($('goban').retrieve('areamap')) var rootNode = getRootTree().nodes[0] for (var sign = -1; sign <= 1; sign += 2) { var tr = $$('#score tbody tr' + (sign < 0 ? ':last-child' : ''))[0] var tds = tr.getElements('td') tds[0].set('text', score['area_' + sign]) tds[1].set('text', score['territory_' + sign]) tds[2].set('text', score['captures_' + sign]) if (sign < 0) tds[3].set('text', ('KM' in rootNode ? rootNode.KM[0] : '0').toFloat()) tds[4].set('text', 0) setScoringMethod(setting.get('scoring.method')) } closeGameInfo() $('score').addClass('show') } function closeScore() { $('score').removeClass('show') setScoringMode(false) } function updateScore() { var rootNode = getRootTree().nodes[0] var results = $$('#score tbody td:last-child').get('text') var diff = results[0].toFloat() - results[1].toFloat() var result = diff > 0 ? 'B+' : (diff < 0 ? 
'W+' : 'Draw') if (diff != 0) result = result + Math.abs(diff) rootNode.RE = [result] } function updateAreaMap() { var board = getBoard().makeMove(0) $$('#goban .row li.dead').each(function(li) { if (li.hasClass('sign_1')) board.captures['-1']++ else if (li.hasClass('sign_-1')) board.captures['1']++ board.arrangement[li.retrieve('tuple')] = 0 }) var map = board.getAreaMap() $$('#goban .row li').each(function(li) { li.removeClass('area_-1').removeClass('area_0').removeClass('area_1') .addClass('area_' + map[li.retrieve('tuple')]) if (!li.getElement('div.area')) li.grab(new Element('div', { class: 'area' })) }) $('goban').store('areamap', map) .store('finalboard', board) } function prepareEditTools() { $$('#edit ul a').addEvent('click', function() { if (!this.getParent().hasClass('selected')) { $$('#edit .selected').removeClass('selected') this.getParent().addClass('selected') } else if (this.getParent().hasClass('stone-tool')) { var img = this.getElement('img') var black = img.get('src') == '../img/edit/stone_1.png' img.set('src', black ? '../img/edit/stone_-1.png' : '../img/edit/stone_1.png') } }) } function wireEvents() { $('goban').addEvent('mousewheel', function(e) { if (e.wheel < 0) goForward() else if (e.wheel > 0) goBack() }) // Resize sidebar $$('#sidebar .verticalresizer').addEvent('mousedown', function() { if (event.button != 0) return $('sidebar').store('initpos', new Tuple(event.x, getSidebarWidth())) }) document.body.addEvent('mouseup', function() { if (!$('sidebar').retrieve('initpos')) return $('sidebar').store('initpos', null) if ($('graph').retrieve('sigma')) $('graph').retrieve('sigma').renderers[0].resize().render() setting.set('view.sidebar_width', getSidebarWidth()) }).addEvent('mousemove', function() { var initPos = $('sidebar').retrieve('initpos') if (!initPos) return initPos.unpack(function(initX, initWidth) { setSidebarWidth(initWidth - event.x + initX) resizeBoard() }) }) } function centerGraphCameraAt(node) { if (!getShowSidebar()) return var s = $('graph').retrieve('sigma') node.color = '#E64533' s.refresh() sigma.misc.animation.camera( s.camera, { x: node[s.camera.readPrefix + 'x'], y: node[s.camera.readPrefix + 'y'] }, { duration: 300 } ) } /** * Menu */ function newGame(playSound) { var buffer = ';GM[1]AP[' + app.getName() + ':' + app.getVersion() + ']' buffer += 'GM[1]CA[UTF-8]PB[Black]PW[White]KM[6.5]SZ[19]' var tree = sgf.parse(sgf.tokenize(buffer)) setRootTree(tree) if (arguments.length >= 1 && playSound) { // Called from menu new Audio('../sound/newgame.wav').play() showGameInfo() } closeScore() } function loadGame(filename) { setIsLoading(true) if (arguments.length == 0) { var result = dialog.showOpenDialog(remote.getCurrentWindow(), { filters: [{ name: 'SGF Files', extensions: ['sgf'] }, { name: 'All Files', extensions: ['*'] }] }) if (result) filename = result[0] } if (filename) { var tree = sgf.parseFile(filename) if (tree.subtrees.length == 0) return setRootTree(tree.subtrees[0]) } setIsLoading(false) closeGameInfo() closeScore() } function saveGame() { setIsLoading(true) var result = dialog.showSaveDialog(remote.getCurrentWindow(), { filters: [{ name: 'SGF Files', extensions: ['sgf'] }, { name: 'All Files', extensions: ['*'] }] }) if (result) { var tree = getRootTree() var text = '(' + sgf.tree2string(tree) + ')' fs.writeFile(result, text) } setIsLoading(false) } function goBack() { getCurrentTreePosition().unpack(function(tree, position) { sgf.navigate(tree, position, -1).unpack(function(prevTree, prevIndex) { setCurrentTreePosition(prevTree, 
prevIndex) }) }) } function goForward() { getCurrentTreePosition().unpack(function(tree, position) { sgf.navigate(tree, position, 1).unpack(function(nextTree, nextIndex) { setCurrentTreePosition(nextTree, nextIndex) }) }) } function goToNextFork() { getCurrentTreePosition().unpack(function(tree, index) { if (index != tree.nodes.length - 1) setCurrentTreePosition(tree, tree.nodes.length - 1) else if (tree.current != null) { var subtree = tree.subtrees[tree.current] setCurrentTreePosition(subtree, subtree.nodes.length - 1) } }) } function goToPreviousFork() { getCurrentTreePosition().unpack(function(tree, index) { if (tree.parent == null || tree.parent.nodes.length == 0) setCurrentTreePosition(tree, 0) else setCurrentTreePosition(tree.parent, tree.parent.nodes.length - 1) }) } function goToBeginning() { var tree = getRootTree() if (tree.nodes.length == 0) return setCurrentTreePosition(tree, 0) } function goToEnd() { getCurrentTreePosition().unpack(function(tree, position) { var t = tree while (t.current != null) { t = t.subtrees[t.current] } setCurrentTreePosition(t, t.nodes.length - 1) }) } function goToNextVariation() { getCurrentTreePosition().unpack(function(tree, index) { if (!tree.parent) return var mod = tree.parent.subtrees.length tree.parent.current = (tree.parent.current + 1) % mod setCurrentTreePosition(tree.parent.subtrees[tree.parent.current], 0) }) } function goToPreviousVariation() { getCurrentTreePosition().unpack(function(tree, index) { if (!tree.parent) return var mod = tree.parent.subtrees.length tree.parent.current = (tree.parent.current + mod - 1) % mod setCurrentTreePosition(tree.parent.subtrees[tree.parent.current], 0) }) } function removeNode(tree, index) { if (!tree.parent && index == 0) { dialog.showMessageBox(remote.getCurrentWindow(), { type: 'warning', title: 'Goban', buttons: ['OK'], message: 'The root node cannot be removed.' 
}) return } var prev = sgf.navigate(tree, index, -1) if (index != 0) { tree.nodes.splice(index, tree.nodes.length) tree.current = null tree.subtrees.length = 0 } else { var parent = tree.parent var i = parent.subtrees.indexOf(tree) parent.subtrees.splice(i, 1) if (parent.current >= i) parent.current-- sgf.reduceTree(parent) } setGraphMatrixDict(sgf.tree2matrixdict(getRootTree())) if (!getCurrentGraphNode()) { setCurrentTreePosition(prev[0], prev[1]) } else { centerGraphCameraAt(getCurrentGraphNode()) } } function buildMenu() { var template = [ { label: '&Game', submenu: [ { label: '&New', accelerator: 'CmdOrCtrl+N', click: function() { newGame(true) } }, { label: '&Load…', accelerator: 'CmdOrCtrl+O', click: function() { loadGame() } }, // { type: 'separator' }, // { // label: '&Save', // accelerator: 'CmdOrCtrl+S' // }, { label: 'Save &As…', accelerator: 'CmdOrCtrl+S', click: function() { saveGame() } }, { type: 'separator' }, { label: '&Info', accelerator: 'CmdOrCtrl+I', click: showGameInfo } ] }, { label: '&Edit', submenu: [ { label: 'Toggle &Edit Mode', accelerator: 'CmdOrCtrl+E', click: function() { setEditMode(!getEditMode()) } }, { type: 'separator' }, { label: '&Stone Tool', accelerator: 'CmdOrCtrl+1', click: function() { setEditMode(true) selectTool('stone') } }, { label: '&Cross Tool', accelerator: 'CmdOrCtrl+2', click: function() { setEditMode(true) selectTool('cross') } }, { label: '&Triangle Tool', accelerator: 'CmdOrCtrl+3', click: function() { setEditMode(true) selectTool('triangle') } }, { label: '&Square Tool', accelerator: 'CmdOrCtrl+4', click: function() { setEditMode(true) selectTool('square') } }, { label: '&Circle Tool', accelerator: 'CmdOrCtrl+5', click: function() { setEditMode(true) selectTool('circle') } }, { label: '&Label Tool', accelerator: 'CmdOrCtrl+6', click: function() { setEditMode(true) selectTool('label') } }, { label: '&Number Tool', accelerator: 'CmdOrCtrl+7', click: function() { setEditMode(true) selectTool('number') } } ] }, { label: '&Navigation', submenu: [ { label: '&Back', accelerator: 'Up', click: goBack }, { label: '&Forward', accelerator: 'Down', click: goForward }, { type: 'separator' }, { label: 'Go To &Previous Fork', accelerator: 'CmdOrCtrl+Up', click: goToPreviousFork }, { label: 'Go To &Next Fork', accelerator: 'CmdOrCtrl+Down', click: goToNextFork }, { type: 'separator' }, { label: 'Go To &Beginning', accelerator: 'CmdOrCtrl+Home', click: goToBeginning }, { label: 'Go To &End', accelerator: 'CmdOrCtrl+End', click: goToEnd }, { type: 'separator' }, { label: 'Go To Next Variatio&n', accelerator: 'Right', click: goToNextVariation }, { label: 'Go To Previous &Variation', accelerator: 'Left', click: goToPreviousVariation } ] }, { label: '&View', submenu: [ { label: '&Fuzzy Stone Placement', type: 'checkbox', checked: getFuzzyStonePlacement(), click: function() { setFuzzyStonePlacement(!getFuzzyStonePlacement()) } }, { label: 'Show &Coordinates', type: 'checkbox', checked: getShowCoordinates(), click: function() { setShowCoordinates(!getShowCoordinates()) resizeBoard() } }, { label: 'Show &Variations', type: 'checkbox', checked: getShowVariations(), click: function() { setShowVariations(!getShowVariations()) } }, { type: 'separator' }, { label: 'Show Game &Graph', type: 'checkbox', checked: getShowSidebar(), click: function() { setShowSidebar(!getShowSidebar()) resizeBoard() } } ] }, { label: '&Help', submenu: [ { label: app.getName(), enabled: false }, { label: 'Version ' + app.getVersion(), enabled: false }, { type: 'separator' }, { label: 
'Issues', click: function() { shell.openExternal('https://github.com/yishn/Goban/issues') } }, { label: 'GitHub Respository', click: function() { shell.openExternal('https://github.com/yishn/Goban') } } ] } ] Menu.setApplicationMenu(Menu.buildFromTemplate(template)) } function openHeaderMenu() { var template = [ { label: '&Pass', click: function() { makeMove(new Tuple(-1, -1)) } }, { label: '&Score', click: function() { setScoringMode(true) } }, { type: 'separator' }, { label: '&Edit', click: function() { setEditMode(true) } }, { label: '&Info', click: showGameInfo } ] menu = Menu.buildFromTemplate(template) menu.popup(remote.getCurrentWindow(), $('headermenu').getPosition().x, $$('header')[0].getCoordinates().top) } function openNodeMenu(tree, index) { if (getScoringMode()) return var template = [ { label: '&Remove', click: function() { removeNode(tree, index) } } ] menu = Menu.buildFromTemplate(template) menu.popup(remote.getCurrentWindow(), event.x, event.y) } /** * Main events */ document.addEvent('keydown', function(e) { if (e.code == 123) { // F12 remote.getCurrentWindow().toggleDevTools() } else if (e.code == 116) { // F5 location.reload() } else if (e.code == 27) { // Escape key closeGameInfo() closeScore() setEditMode(false) } }).addEvent('domready', function() { loadSettings() buildMenu() prepareEditTools() prepareGameGraph() wireEvents() if (process.argv.length >= 2) loadGame(process.argv[1]) else newGame() }) window.addEvent('resize', function() { resizeBoard() }).addEvent('beforeunload', function() { if (remote.getCurrentWindow().isMaximized() || remote.getCurrentWindow().isMinimized()) return var size = document.body.getSize() setting.set('window.width', size.x).set('window.height', size.y) })
view/index.js
var remote = require('remote') var fs = require('fs') var shell = require('shell') var sgf = require('../module/sgf.js') var process = remote.require('process') var app = remote.require('app'); var dialog = remote.require('dialog') var setting = remote.require('./module/setting.js') var Menu = remote.require('menu') var Tuple = require('../lib/tuple') var Board = require('../module/board.js') var Scrollbar = require('../lib/gemini-scrollbar') /** * Getters and setters */ function setIsLoading(loading) { if (loading) document.body.addClass('loading') else $('loading').tween('opacity', 0).get('tween').addEvent('complete', function() { document.body.removeClass('loading') $('loading').setStyle('opacity', null) }) } function getShowVariations() { return $('goban').hasClass('variations') } function setShowVariations(show) { if (show) $('goban').addClass('variations') else $('goban').removeClass('variations') setting.set('view.show_variations', show) } function getFuzzyStonePlacement() { return $('goban').hasClass('fuzzy') } function setFuzzyStonePlacement(fuzzy) { if (fuzzy) $('goban').addClass('fuzzy') else $('goban').removeClass('fuzzy') setting.set('view.fuzzy_stone_placement', fuzzy) } function getShowCoordinates() { return $('goban').hasClass('coordinates') } function setShowCoordinates(show) { if (show) $('goban').addClass('coordinates') else $('goban').removeClass('coordinates') setting.set('view.show_coordinates', show) } function getShowSidebar() { return document.body.hasClass('sidebar') } function setShowSidebar(show) { if (show) document.body.addClass('sidebar') else document.body.removeClass('sidebar') $('sidebar').setStyle('width', setting.get('view.sidebar_width')) $('main').setStyle('right', show ? setting.get('view.sidebar_width') : 0) setting.set('view.show_sidebar', show) if (show) { updateGraph() updateSlider() } else { // Clear game graph var s = $('graph').retrieve('sigma') if (s) { s.graph.clear() s.refresh() } } // Resize window var win = remote.getCurrentWindow() var size = win.getContentSize() if (win.isMaximized()) return win.setContentSize(size[0] + (show ? 1 : -1) * setting.get('view.sidebar_width').toInt(), size[1]) } function getSidebarWidth() { return $('sidebar').getStyle('width').toInt() } function setSidebarWidth(width) { $('sidebar').setStyle('width', width) $$('.sidebar #main').setStyle('right', width) } function getPlayerName(sign) { return $$('#player_' + sign + ' .name')[0].get('text') } function setPlayerName(sign, name) { if (name.trim() == '') name = sign > 0 ? 'Black' : 'White' $$('#player_' + sign + ' .name')[0].set('text', name) } function getCaptures() { return { '-1': $$('#player_-1 .captures')[0].get('text').toInt(), '1': $$('#player_1 .captures')[0].get('text').toInt() } } function setCaptures(captures) { $$('#player_-1 .captures')[0].set('text', captures['-1']) .setStyle('opacity', captures['-1'] == 0 ? 0 : .7) $$('#player_1 .captures')[0].set('text', captures['1']) .setStyle('opacity', captures['1'] == 0 ? 0 : .7) } function getCurrentPlayer() { return $$('.currentplayer')[0].get('src') == '../img/ui/blacktoplay.png' ? 1 : -1 } function setCurrentPlayer(sign) { $$('.currentplayer').set('src', sign > 0 ? 
'../img/ui/blacktoplay.png' : '../img/ui/whitetoplay.png') } function getSliderValue() { return $$('#sidebar .slider div')[0].getStyle('height').toInt() } function setSliderValue(value) { $$('#sidebar .slider div')[0].setStyle('height', value + '%') } function getRootTree() { if (!getCurrentTreePosition()) return null return getCurrentTreePosition().unpack(function(tree, index) { while (tree.parent != null) tree = tree.parent return tree }) } function setRootTree(tree) { if (tree.nodes.length == 0) return tree.parent = null setCurrentTreePosition(sgf.addBoard(tree), 0) // Update UI if (getShowSidebar()) { updateGraph() updateSlider() } if ('PB' in tree.nodes[0]) setPlayerName(1, tree.nodes[0].PB[0]) if ('PW' in tree.nodes[0]) setPlayerName(-1, tree.nodes[0].PW[0]) } function getGraphMatrixDict() { return $('graph').retrieve('graphmatrixdict') } function setGraphMatrixDict(matrixdict) { if (!getShowSidebar()) return var s = $('graph').retrieve('sigma') $('graph').store('graphmatrixdict', matrixdict) s.graph.clear() s.graph.read(sgf.matrix2graph(matrixdict)) s.refresh() } function setCurrentTreePosition(tree, index) { if (!tree || getScoringMode()) return // Remove current graph node color var n = getCurrentGraphNode() if (n && n != getGraphNode(tree, index)) delete n.color $('goban').store('position', new Tuple(tree, index)) // Set current path var t = tree while (t.parent) { t.parent.current = t.parent.subtrees.indexOf(t) t = t.parent } // Update graph and slider var n = getCurrentGraphNode() if (n) { setTimeout(function() { if (getCurrentGraphNode() != n) return centerGraphCameraAt(n) updateSlider() }, 300) } setBoard(sgf.addBoard(tree, index).nodes[index].board) // Determine current player setCurrentPlayer(1) if ('B' in tree.nodes[index]) setCurrentPlayer(-1) else if ('W' in tree.nodes[index]) setCurrentPlayer(1) else if ('PL' in tree.nodes[index]) setCurrentPlayer(tree.nodes[index].PL[0] == 'W' ? -1 : 1) else if ('HA' in tree.nodes[index] && tree.nodes[index].HA[0].toInt() >= 1) setCurrentPlayer(-1) } function getCurrentTreePosition() { return $('goban').retrieve('position') } function getCurrentGraphNode() { if (!getCurrentTreePosition()) return null return getCurrentTreePosition().unpack(getGraphNode) } function getGraphNode(tree, index) { var s = $('graph').retrieve('sigma') return s.graph.nodes(tree.id + '-' + index) } function getSelectedTool() { var li = $$('#edit .selected')[0] var tool = li.get('class').replace('selected', '').replace('-tool', '').trim() if (tool == 'stone') { return li.getElement('img').get('src').contains('_1') ? 
'stone_1' : 'stone_-1' } else { return tool } } function getBoard() { return $('goban').retrieve('board') } function setBoard(board) { if (!getBoard() || getBoard().size != board.size) { $('goban').store('board', board) buildBoard() } $('goban').store('board', board) setCaptures(board.captures) for (var x = 0; x < board.size; x++) { for (var y = 0; y < board.size; y++) { var li = $('goban').getElement('.pos_' + x + '-' + y) var sign = board.arrangement[li.retrieve('tuple')] var types = ['ghost_1', 'ghost_-1', 'circle', 'triangle', 'cross', 'square', 'label', 'point'] types.each(function(x) { if (li.hasClass(x)) li.removeClass(x) }) if (li.retrieve('tuple') in board.overlays) { board.overlays[li.retrieve('tuple')].unpack(function(type, ghost, label) { if (type != '') li.addClass(type) if (ghost != 0) li.addClass('ghost_' + ghost) if (label != '') li.set('data-label', label) }) } if (li.hasClass('sign_' + sign)) continue for (var i = -1; i <= 1; i++) { if (li.hasClass('sign_' + i)) li.removeClass('sign_' + i) } li.addClass('sign_' + sign) .getElement('img').set('src', '../img/goban/stone_' + sign + '.png') } } } function getEditMode() { return $('bar').hasClass('edit') } function setEditMode(editMode) { if (editMode) { $('bar').addClass('edit') closeScore() closeGameInfo() } else { $('bar').removeClass('edit') } } function getScoringMode() { return $$('body')[0].hasClass('scoring') } function setScoringMode(scoringMode) { if (scoringMode) { $$('body').addClass('scoring') setEditMode(false) closeGameInfo() var deadstones = getBoard().guessDeadStones() deadstones.each(function(v) { $$('#goban .pos_' + v[0] + '-' + v[1]).addClass('dead') }) updateAreaMap() } else { $$('body').removeClass('scoring') $$('.dead').removeClass('dead') } } function getScoringMethod() { return $$('#score .method .territory')[0].hasClass('current') ? 'territory' : 'area' } function setScoringMethod(method) { $$('#score .method li').removeClass('current') $$('#score .method .' + method).addClass('current') $$('#score tr > *').addClass('disabled') $$('#score table .' + method).removeClass('disabled') setting.set('scoring.method', method) // Update UI for (var sign = -1; sign <= 1; sign += 2) { var tr = $$('#score tbody tr' + (sign < 0 ? ':last-child' : ''))[0] var tds = tr.getElements('td') tds[4].set('text', 0) for (var i = 0; i <= 3; i++) { if (tds[i].hasClass('disabled') || isNaN(tds[i].get('text').toFloat())) continue tds[4].set('text', tds[4].get('text').toFloat() + tds[i].get('text').toFloat()) } } } /** * Methods */ function loadSettings() { if (setting.get('view.fuzzy_stone_placement')) $('goban').addClass('fuzzy') if (setting.get('view.show_coordinates')) $('goban').addClass('coordinates') if (setting.get('view.show_variations')) $('goban').addClass('variations') if (setting.get('view.show_sidebar')) { document.body.addClass('sidebar') setSidebarWidth(setting.get('view.sidebar_width')) } } function prepareGameGraph() { var container = $('graph') var s = new sigma(container) s.settings({ defaultNodeColor: '#eee', defaultEdgeColor: '#eee', defaultNodeBorderColor: 'rgba(255,255,255,.2)', edgeColor: 'default', borderSize: 2, zoomMax: 1, zoomMin: 1, autoResize: false, autoRescale: false }) s.bind('clickNode', function(e) { e.data.node.data.unpack(function(tree, index) { setCurrentTreePosition(tree, index) }) }).bind('rightClickNode', function(e) { e.data.node.data.unpack(function(tree, index) { openNodeMenu(tree, index) }) }) container.store('sigma', s) } function selectTool(tool) { $$('#edit .' 
+ tool + '-tool a').fireEvent('click') } function makeMove(vertex) { if (getBoard().hasVertex(vertex) && getBoard().arrangement[vertex] != 0) return var position = getCurrentTreePosition() var tree = position[0], index = position[1] var color = getCurrentPlayer() > 0 ? 'B' : 'W' var sign = color == 'B' ? 1 : -1 // Check for ko var ko = sgf.navigate(tree, index, -1).unpack(function(prevTree, prevIndex) { if (!prevTree) return var hash = getBoard().makeMove(sign, vertex).getHash() return prevTree.nodes[prevIndex].board.getHash() == hash }) if (ko) { var button = dialog.showMessageBox(remote.getCurrentWindow(), { type: 'info', title: 'Goban', buttons: ['Play Anyway', 'Don’t Play', 'Cancel'], message: 'You are about to play a move which repeats a previous board position. ' + 'This is invalid in some rulesets.' }) if (button != 0) return } // Play sounds if (getBoard().hasVertex(vertex)) { // Detect captured stones if (getBoard().getNeighborhood(vertex).some(function(v) { return getBoard().arrangement[v] == -sign && getBoard().getLiberties(v).length == 1 })) setTimeout(function() { new Audio('../sound/capture' + Math.floor(Math.random() * 5) + '.wav').play() }, 400 + Math.floor(Math.random() * 200)) new Audio('../sound/' + Math.floor(Math.random() * 5) + '.wav').play() } else new Audio('../sound/pass.wav').play() // Randomize shift and readjust var li = $$('#goban .pos_' + vertex[0] + '-' + vertex[1]) var direction = Math.floor(Math.random() * 9) for (var i = 0; i < 9; i++) li.removeClass('shift_' + i) li.addClass('shift_' + direction) if (direction == 1 || direction == 5 || direction == 8) { // Left $$('#goban .pos_' + (vertex[0] - 1) + '-' + vertex[1]) .removeClass('shift_3').removeClass('shift_7').removeClass('shift_6') } else if (direction == 2 || direction == 5 || direction == 6) { // Top $$('#goban .pos_' + vertex[0] + '-' + (vertex[1] - 1)) .removeClass('shift_4').removeClass('shift_7').removeClass('shift_8') } else if (direction == 3 || direction == 7 || direction == 6) { // Right $$('#goban .pos_' + (vertex[0] + 1) + '-' + vertex[1]) .removeClass('shift_1').removeClass('shift_5').removeClass('shift_8') } else if (direction == 4 || direction == 7 || direction == 8) { // Bottom $$('#goban .pos_' + vertex[0] + '-' + (vertex[1] + 1)) .removeClass('shift_2').removeClass('shift_5').removeClass('shift_6') } if (tree.current == null && tree.nodes.length - 1 == index) { // Append move var node = {} node[color] = [sgf.vertex2point(vertex)] tree.nodes.push(node) setCurrentTreePosition(tree, tree.nodes.length - 1) } else { if (index != tree.nodes.length - 1) { // Search for next move var nextNode = tree.nodes[index + 1] var moveExists = color in nextNode && sgf.point2vertex(nextNode[color][0]).equals(vertex) if (moveExists) { setCurrentTreePosition(tree, index + 1) return } } else { // Search for variation var variations = tree.subtrees.filter(function(subtree) { return subtree.nodes.length > 0 && color in subtree.nodes[0] && sgf.point2vertex(subtree.nodes[0][color][0]).equals(vertex) }) if (variations.length > 0) { setCurrentTreePosition(sgf.addBoard(variations[0]), 0) return } } // Create variation var splitted = sgf.splitTree(tree, index) var node = {}; node[color] = [sgf.vertex2point(vertex)] var newtree = { nodes: [node], subtrees: [], parent: splitted, current: null } splitted.subtrees.push(newtree) splitted.current = splitted.subtrees.length - 1 sgf.addBoard(newtree, newtree.nodes.length - 1) setCurrentTreePosition(newtree, 0) } updateGraph() updateSlider() } function updateGraph() { if 
(!getShowSidebar() || !getRootTree()) return setGraphMatrixDict(sgf.tree2matrixdict(getRootTree())) centerGraphCameraAt(getCurrentGraphNode()) } function updateSlider() { if (!getShowSidebar()) return getCurrentTreePosition().unpack(function(tree, index) { var total = sgf.getCurrentHeight(getRootTree()) - 1 var relative = total + 1 - sgf.getCurrentHeight(tree) + index setSliderValue(total == 0 ? 0 : relative * 100 / total) }) } function vertexClicked() { closeGameInfo() if (!getEditMode() && !getScoringMode()) { if (event.button != 0) return makeMove(this) return } // Scoring mode activated if (getScoringMode()) { if (getBoard().arrangement[this] == 0) return getBoard().getRelatedChains(this).each(function(vertex) { $$('#goban .pos_' + vertex[0] + '-' + vertex[1]).toggleClass('dead') }) updateAreaMap() return } // Edit mode activated getCurrentTreePosition().unpack(function(tree, index) { var node = tree.nodes[index] var tool = getSelectedTool() var board = getBoard() var dictionary = { 'cross': 'MA', 'triangle': 'TR', 'circle': 'CR', 'square': 'SQ', 'number': 'LB', 'label': 'LB' } if (tool.contains('stone')) { if ('B' in node || 'W' in node) { // New variation needed var splitted = sgf.splitTree(tree, index) if (splitted != tree || splitted.subtrees.length != 0) { tree = { nodes: [], subtrees: [], current: null, parent: splitted } splitted.subtrees.push(tree) } node = { PL: getCurrentPlayer() > 0 ? ['B'] : ['W'] } index = tree.nodes.length tree.nodes.push(node) } var sign = tool.contains('_1') ? 1 : -1 if (event.button == 2) sign = -sign var oldSign = board.arrangement[this] var ids = ['AW', 'AE', 'AB'] var id = ids[sign + 1] var point = sgf.vertex2point(this) for (var i = -1; i <= 1; i++) { if (!(ids[i + 1] in node)) continue k = node[ids[i + 1]].indexOf(point) if (k >= 0) { node[ids[i + 1]].splice(k, 1) if (node[ids[i + 1]].length == 0) { delete node[ids[i + 1]] } } } if (oldSign != sign) { if (id in node) node[id].push(point) else node[id] = [point] } else if (oldSign == sign) { if ('AE' in node) node.AE.push(point) else node.AE = [point] } } else { if (event.button != 0) return if (tool != 'label' && tool != 'number') { if (this in board.overlays && board.overlays[this][0] == tool) { delete board.overlays[this] } else { board.overlays[this] = new Tuple(tool, 0, '') } } else if (tool == 'number') { if (this in board.overlays && board.overlays[this][0] == 'label') { delete board.overlays[this] } else { var number = 1 if ('LB' in node) { node.LB.each(function(value) { var label = value.substr(3).toInt() if (!isNaN(label)) number = Math.max(number, label + 1) }) } board.overlays[this] = new Tuple(tool, 0, number.toString()) } } else if (tool == 'label') { if (this in board.overlays && board.overlays[this][0] == 'label') { delete board.overlays[this] } else { var alpha = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' var k = 0 if ('LB' in node) { node.LB.each(function(value) { if (value.length != 4 || !alpha.contains(value[3])) return var label = value[3] k = Math.max(k, (alpha.indexOf(label) + 1) % alpha.length) }) } board.overlays[this] = new Tuple(tool, 0, alpha[k]) } } Object.each(dictionary, function(id) { delete node[id] }) $$('#goban .row li').each(function(li) { var vertex = li.retrieve('tuple') if (!(vertex in board.overlays)) return var id = dictionary[board.overlays[vertex][0]] var pt = sgf.vertex2point(vertex) if (id == 'LB') pt += ':' + board.overlays[vertex][2] if (id in node) node[id].push(pt) else node[id] = [pt] }) } setCurrentTreePosition(tree, index) }.bind(this)) } function buildBoard() 
{ var board = getBoard() var rows = [] var hoshi = board.getHandicapPlacement(9) for (var y = 0; y < board.size; y++) { var ol = new Element('ol.row') for (var x = 0; x < board.size; x++) { var vertex = new Tuple(x, y) var li = new Element('li.pos_' + x + '-' + y) .store('tuple', vertex) .addClass('shift_' + Math.floor(Math.random() * 9)) var img = new Element('img', { src: '../img/goban/stone_0.png' }) if (hoshi.some(function(v) { return v.equals(vertex) })) li.addClass('hoshi') ol.adopt(li.adopt(img) .addEvent('mouseup', function() { if (!$('goban').retrieve('mousedown')) return $('goban').store('mousedown', false) vertexClicked.call(this) }.bind(vertex)) .addEvent('mousedown', function() { $('goban').store('mousedown', true) }) ) } rows.push(ol) } var alpha = 'ABCDEFGHJKLMNOPQRSTUVWXYZ' var coordx = new Element('ol.coordx') var coordy = new Element('ol.coordy') for (var i = 0; i < board.size; i++) { coordx.adopt(new Element('li', { text: alpha[i] })) coordy.adopt(new Element('li', { text: board.size - i })) } var goban = $$('#goban div')[0] goban.empty().adopt(rows, coordx, coordy) goban.grab(coordx.clone(), 'top').grab(coordy.clone(), 'top') resizeBoard() } function resizeBoard() { var board = getBoard() if (!board) return var width = $('goban').getStyle('width').toInt() var height = $('goban').getStyle('height').toInt() var min = Math.min(width, height) var hasCoordinates = getShowCoordinates() var fieldsize = Math.floor(min / board.size) min = fieldsize * board.size if (hasCoordinates) { fieldsize = Math.floor(min / (board.size + 2)) min = fieldsize * (board.size + 2) } $$('#goban > div').setStyle('width', min).setStyle('height', min) $$('#goban .row, #goban .coordx').setStyle('height', fieldsize).setStyle('line-height', fieldsize) $$('#goban .row, #goban .coordx').setStyle('margin-left', hasCoordinates ? fieldsize : 0) $$('#goban .coordy').setStyle('width', fieldsize).setStyle('top', fieldsize).setStyle('line-height', fieldsize) $$('#goban .coordy:last-child').setStyle('left', fieldsize * (board.size + 1)) $$('#goban li').setStyle('width', fieldsize).setStyle('height', fieldsize) } function showGameInfo() { closeScore() var tree = getRootTree() var rootNode = tree.nodes[0] var info = $('info') info.addClass('show').getElement('input[name="name_1"]').focus() info.getElement('input[name="name_1"]').set('value', getPlayerName(1)) info.getElement('input[name="name_-1"]').set('value', getPlayerName(-1)) info.getElement('input[name="rank_1"]').set('value', 'BR' in rootNode ? rootNode.BR[0] : '') info.getElement('input[name="rank_-1"]').set('value', 'WR' in rootNode ? rootNode.WR[0] : '') info.getElement('input[name="result"]').set('value', 'RE' in rootNode ? rootNode.RE[0] : '') info.getElement('input[name="komi"]').set('value', 'KM' in rootNode ? rootNode.KM[0].toFloat() : '') var size = info.getElement('input[name="size"]') size.set('value', 'SZ' in rootNode ? 
rootNode.SZ[0] : '') var handicap = info.getElement('select[name="handicap"]') if ('HA' in rootNode) handicap.selectedIndex = rootNode.HA[0].toInt() - 1 else handicap.selectedIndex = 0 var disabled = tree.nodes.length > 1 || tree.subtrees.length > 0 handicap.disabled = disabled size.disabled = disabled } function closeGameInfo() { $('info').removeClass('show') } function updateGameInfo() { var rootNode = getRootTree().nodes[0] var info = $('info') rootNode.BR = [info.getElement('input[name="rank_1"]').get('value').trim()] rootNode.WR = [info.getElement('input[name="rank_-1"]').get('value').trim()] rootNode.PB = [info.getElement('input[name="name_1"]').get('value').trim()] rootNode.PW = [info.getElement('input[name="name_-1"]').get('value').trim()] setPlayerName(1, rootNode.PB[0]) setPlayerName(-1, rootNode.PW[0]) var result = info.getElement('input[name="result"]').get('value').trim() rootNode.RE = [result] if (result == '') delete rootNode.RE var komi = info.getElement('input[name="komi"]').get('value').toFloat() rootNode.KM = [String.from(komi)] if (isNaN(komi)) rootNode.KM = ['0'] var handicap = info.getElement('select[name="handicap"]').selectedIndex if (handicap == 0) delete rootNode.HA else rootNode.HA = [String.from(handicap + 1)] var size = info.getElement('input[name="size"]').get('value').toInt() rootNode.SZ = [String.from(Math.max(Math.min(size, 26), 9))] if (isNaN(size)) rootNode.SZ = ['19'] if (!info.getElement('select[name="handicap"]').disabled) { setCurrentTreePosition(getRootTree(), 0) if (!('HA' in rootNode)) { delete rootNode.AB } else { var board = getBoard() var stones = board.getHandicapPlacement(rootNode.HA[0].toInt()) rootNode.AB = [] for (var i = 0; i < stones.length; i++) { rootNode.AB.push(sgf.vertex2point(stones[i])) } } setCurrentTreePosition(getRootTree(), 0) } } function showScore() { var board = $('goban').retrieve('finalboard') var score = board.getScore($('goban').retrieve('areamap')) var rootNode = getRootTree().nodes[0] for (var sign = -1; sign <= 1; sign += 2) { var tr = $$('#score tbody tr' + (sign < 0 ? ':last-child' : ''))[0] var tds = tr.getElements('td') tds[0].set('text', score['area_' + sign]) tds[1].set('text', score['territory_' + sign]) tds[2].set('text', score['captures_' + sign]) if (sign < 0) tds[3].set('text', ('KM' in rootNode ? rootNode.KM[0] : '0').toFloat()) tds[4].set('text', 0) setScoringMethod(setting.get('scoring.method')) } closeGameInfo() $('score').addClass('show') } function closeScore() { $('score').removeClass('show') setScoringMode(false) } function updateScore() { var rootNode = getRootTree().nodes[0] var results = $$('#score tbody td:last-child').get('text') var diff = results[0].toFloat() - results[1].toFloat() var result = diff > 0 ? 'B+' : (diff < 0 ? 
'W+' : 'Draw') if (diff != 0) result = result + Math.abs(diff) rootNode.RE = [result] } function updateAreaMap() { var board = getBoard().makeMove(0) $$('#goban .row li.dead').each(function(li) { if (li.hasClass('sign_1')) board.captures['-1']++ else if (li.hasClass('sign_-1')) board.captures['1']++ board.arrangement[li.retrieve('tuple')] = 0 }) var map = board.getAreaMap() $$('#goban .row li').each(function(li) { li.removeClass('area_-1').removeClass('area_0').removeClass('area_1') .addClass('area_' + map[li.retrieve('tuple')]) if (!li.getElement('div.area')) li.grab(new Element('div', { class: 'area' })) }) $('goban').store('areamap', map) .store('finalboard', board) } function prepareEditTools() { $$('#edit ul a').addEvent('click', function() { if (!this.getParent().hasClass('selected')) { $$('#edit .selected').removeClass('selected') this.getParent().addClass('selected') } else if (this.getParent().hasClass('stone-tool')) { var img = this.getElement('img') var black = img.get('src') == '../img/edit/stone_1.png' img.set('src', black ? '../img/edit/stone_-1.png' : '../img/edit/stone_1.png') } }) } function wireEvents() { $('goban').addEvent('mousewheel', function(e) { if (e.wheel < 0) goForward() else if (e.wheel > 0) goBack() }) // Resize sidebar $$('#sidebar .verticalresizer').addEvent('mousedown', function() { if (event.button != 0) return $('sidebar').store('initpos', new Tuple(event.x, getSidebarWidth())) }) document.body.addEvent('mouseup', function() { if (!$('sidebar').retrieve('initpos')) return $('sidebar').store('initpos', null) if ($('graph').retrieve('sigma')) $('graph').retrieve('sigma').renderers[0].resize().render() setting.set('view.sidebar_width', getSidebarWidth()) }).addEvent('mousemove', function() { var initPos = $('sidebar').retrieve('initpos') if (!initPos) return initPos.unpack(function(initX, initWidth) { setSidebarWidth(initWidth - event.x + initX) resizeBoard() }) }) } function centerGraphCameraAt(node) { if (!getShowSidebar()) return var s = $('graph').retrieve('sigma') node.color = '#E64533' s.refresh() sigma.misc.animation.camera( s.camera, { x: node[s.camera.readPrefix + 'x'], y: node[s.camera.readPrefix + 'y'] }, { duration: 300 } ) } /** * Menu */ function newGame(playSound) { var buffer = ';GM[1]AP[' + app.getName() + ':' + app.getVersion() + ']' buffer += 'GM[1]CA[UTF-8]PB[Black]PW[White]KM[6.5]SZ[19]' var tree = sgf.parse(sgf.tokenize(buffer)) setRootTree(tree) if (arguments.length >= 1 && playSound) { // Called from menu new Audio('../sound/newgame.wav').play() showGameInfo() } closeScore() } function loadGame(filename) { setIsLoading(true) if (arguments.length == 0) { var result = dialog.showOpenDialog(remote.getCurrentWindow(), { filters: [{ name: 'SGF Files', extensions: ['sgf'] }, { name: 'All Files', extensions: ['*'] }] }) if (result) filename = result[0] } if (filename) { var tree = sgf.parseFile(filename) if (tree.subtrees.length == 0) return setRootTree(tree.subtrees[0]) } setIsLoading(false) closeGameInfo() closeScore() } function saveGame() { setIsLoading(true) var result = dialog.showSaveDialog(remote.getCurrentWindow(), { filters: [{ name: 'SGF Files', extensions: ['sgf'] }, { name: 'All Files', extensions: ['*'] }] }) if (result) { var tree = getRootTree() var text = '(' + sgf.tree2string(tree) + ')' fs.writeFile(result, text) } setIsLoading(false) } function goBack() { getCurrentTreePosition().unpack(function(tree, position) { sgf.navigate(tree, position, -1).unpack(function(prevTree, prevIndex) { setCurrentTreePosition(prevTree, 
prevIndex) }) }) } function goForward() { getCurrentTreePosition().unpack(function(tree, position) { sgf.navigate(tree, position, 1).unpack(function(nextTree, nextIndex) { setCurrentTreePosition(nextTree, nextIndex) }) }) } function goToNextFork() { getCurrentTreePosition().unpack(function(tree, index) { if (index != tree.nodes.length - 1) setCurrentTreePosition(tree, tree.nodes.length - 1) else if (tree.current != null) { var subtree = tree.subtrees[tree.current] setCurrentTreePosition(subtree, subtree.nodes.length - 1) } }) } function goToPreviousFork() { getCurrentTreePosition().unpack(function(tree, index) { if (tree.parent == null || tree.parent.nodes.length == 0) setCurrentTreePosition(tree, 0) else setCurrentTreePosition(tree.parent, tree.parent.nodes.length - 1) }) } function goToBeginning() { var tree = getRootTree() if (tree.nodes.length == 0) return setCurrentTreePosition(tree, 0) } function goToEnd() { getCurrentTreePosition().unpack(function(tree, position) { var t = tree while (t.current != null) { t = t.subtrees[t.current] } setCurrentTreePosition(t, t.nodes.length - 1) }) } function goToNextVariation() { getCurrentTreePosition().unpack(function(tree, index) { if (!tree.parent) return var mod = tree.parent.subtrees.length tree.parent.current = (tree.parent.current + 1) % mod setCurrentTreePosition(tree.parent.subtrees[tree.parent.current], 0) }) } function goToPreviousVariation() { getCurrentTreePosition().unpack(function(tree, index) { if (!tree.parent) return var mod = tree.parent.subtrees.length tree.parent.current = (tree.parent.current + mod - 1) % mod setCurrentTreePosition(tree.parent.subtrees[tree.parent.current], 0) }) } function removeNode(tree, index) { if (!tree.parent && index == 0) { dialog.showMessageBox(remote.getCurrentWindow(), { type: 'warning', title: 'Goban', buttons: ['OK'], message: 'The root node cannot be removed.' 
}) return } var prev = sgf.navigate(tree, index, -1) if (index != 0) { tree.nodes.splice(index, tree.nodes.length) tree.current = null tree.subtrees.length = 0 } else { var parent = tree.parent var i = parent.subtrees.indexOf(tree) parent.subtrees.splice(i, 1) if (parent.current >= i) parent.current-- sgf.reduceTree(parent) } setGraphMatrixDict(sgf.tree2matrixdict(getRootTree())) if (!getCurrentGraphNode()) { setCurrentTreePosition(prev[0], prev[1]) } else { centerGraphCameraAt(getCurrentGraphNode()) } } function buildMenu() { var template = [ { label: '&Game', submenu: [ { label: '&New', accelerator: 'CmdOrCtrl+N', click: function() { newGame(true) } }, { label: '&Load…', accelerator: 'CmdOrCtrl+O', click: function() { loadGame() } }, // { type: 'separator' }, // { // label: '&Save', // accelerator: 'CmdOrCtrl+S' // }, { label: 'Save &As…', accelerator: 'CmdOrCtrl+S', click: function() { saveGame() } }, { type: 'separator' }, { label: '&Info', accelerator: 'CmdOrCtrl+I', click: showGameInfo } ] }, { label: '&Edit', submenu: [ { label: 'Toggle &Edit Mode', accelerator: 'CmdOrCtrl+E', click: function() { setEditMode(!getEditMode()) } }, { type: 'separator' }, { label: '&Stone Tool', accelerator: 'CmdOrCtrl+1', click: function() { setEditMode(true) selectTool('stone') } }, { label: '&Cross Tool', accelerator: 'CmdOrCtrl+2', click: function() { setEditMode(true) selectTool('cross') } }, { label: '&Triangle Tool', accelerator: 'CmdOrCtrl+3', click: function() { setEditMode(true) selectTool('triangle') } }, { label: '&Square Tool', accelerator: 'CmdOrCtrl+4', click: function() { setEditMode(true) selectTool('square') } }, { label: '&Circle Tool', accelerator: 'CmdOrCtrl+5', click: function() { setEditMode(true) selectTool('circle') } }, { label: '&Label Tool', accelerator: 'CmdOrCtrl+6', click: function() { setEditMode(true) selectTool('label') } }, { label: '&Number Tool', accelerator: 'CmdOrCtrl+7', click: function() { setEditMode(true) selectTool('number') } } ] }, { label: '&Navigation', submenu: [ { label: '&Back', accelerator: 'Up', click: goBack }, { label: '&Forward', accelerator: 'Down', click: goForward }, { type: 'separator' }, { label: 'Go To &Previous Fork', accelerator: 'CmdOrCtrl+Up', click: goToPreviousFork }, { label: 'Go To &Next Fork', accelerator: 'CmdOrCtrl+Down', click: goToNextFork }, { type: 'separator' }, { label: 'Go To &Beginning', accelerator: 'CmdOrCtrl+Home', click: goToBeginning }, { label: 'Go To &End', accelerator: 'CmdOrCtrl+End', click: goToEnd }, { type: 'separator' }, { label: 'Go To Next Variatio&n', accelerator: 'Right', click: goToNextVariation }, { label: 'Go To Previous &Variation', accelerator: 'Left', click: goToPreviousVariation } ] }, { label: '&View', submenu: [ { label: '&Fuzzy Stone Placement', type: 'checkbox', checked: getFuzzyStonePlacement(), click: function() { setFuzzyStonePlacement(!getFuzzyStonePlacement()) } }, { label: 'Show &Coordinates', type: 'checkbox', checked: getShowCoordinates(), click: function() { setShowCoordinates(!getShowCoordinates()) resizeBoard() } }, { label: 'Show &Variations', type: 'checkbox', checked: getShowVariations(), click: function() { setShowVariations(!getShowVariations()) } }, { type: 'separator' }, { label: 'Show Game &Graph', type: 'checkbox', checked: getShowSidebar(), click: function() { setShowSidebar(!getShowSidebar()) resizeBoard() } } ] }, { label: '&Help', submenu: [ { label: app.getName(), enabled: false }, { label: 'Version ' + app.getVersion(), enabled: false }, { type: 'separator' }, { label: 
'Issues', click: function() { shell.openExternal('https://github.com/yishn/Goban/issues') } }, { label: 'GitHub Respository', click: function() { shell.openExternal('https://github.com/yishn/Goban') } } ] } ] Menu.setApplicationMenu(Menu.buildFromTemplate(template)) } function openHeaderMenu() { var template = [ { label: '&Pass', click: function() { makeMove(new Tuple(-1, -1)) } }, { label: '&Score', click: function() { setScoringMode(true) } }, { type: 'separator' }, { label: '&Edit', click: function() { setEditMode(true) } }, { label: '&Info', click: showGameInfo } ] menu = Menu.buildFromTemplate(template) menu.popup(remote.getCurrentWindow(), $('headermenu').getPosition().x, $$('header')[0].getCoordinates().top) } function openNodeMenu(tree, index) { if (getScoringMode()) return var template = [ { label: '&Remove', click: function() { removeNode(tree, index) } } ] menu = Menu.buildFromTemplate(template) menu.popup(remote.getCurrentWindow(), event.x, event.y) } /** * Main events */ document.addEvent('keydown', function(e) { if (e.code == 123) { // F12 remote.getCurrentWindow().toggleDevTools() } else if (e.code == 116) { // F5 location.reload() } else if (e.code == 27) { // Escape key closeGameInfo() closeScore() setEditMode(false) } }).addEvent('domready', function() { loadSettings() buildMenu() prepareEditTools() prepareGameGraph() wireEvents() if (process.argv.length >= 2) loadGame(process.argv[1]) else newGame() }) window.addEvent('resize', function() { resizeBoard() }).addEvent('beforeunload', function() { if (remote.getCurrentWindow().isMaximized() || remote.getCurrentWindow().isMinimized()) return var size = document.body.getSize() setting.set('window.width', size.x).set('window.height', size.y) })
Trees are generated with ids 2
view/index.js
Trees are generated with ids 2
<ide><path>iew/index.js
<ide> var fs = require('fs')
<ide> var shell = require('shell')
<ide> var sgf = require('../module/sgf.js')
<add>var uuid = require('../lib/node-uuid')
<ide> var process = remote.require('process')
<ide> var app = remote.require('app');
<ide> var dialog = remote.require('dialog')
<ide>
<ide> var splitted = sgf.splitTree(tree, index)
<ide> var node = {}; node[color] = [sgf.vertex2point(vertex)]
<del> var newtree = { nodes: [node], subtrees: [], parent: splitted, current: null }
<add> var newtree = { id: uuid.v4(), nodes: [node], subtrees: [], parent: splitted, current: null }
<ide>
<ide> splitted.subtrees.push(newtree)
<ide> splitted.current = splitted.subtrees.length - 1
JavaScript
mit
452df28eedbe60d30d700cc6d8933d46e4e0489e
0
RhaLabs/ProjectManagement,RhaLabs/ProjectManagement,RhaLabs/ProjectManagement,RhaLabs/ProjectManagement
$( document ).ready(function( ) {
    if ( $('table td.last-column').html() != 'No result') {
        var $docHeight = window.innerHeight - 300;

        var table = $('table').DataTable( {
            "scrollY": $docHeight,
            "scrollX": "100%",
            "scrollCollapse": true,
            "paging": false,
            "ordering": false,
            "filter": false,
            "info": false
        } );


        new $.fn.dataTable.FixedColumns( table, {
            "leftColumns": 4
        } );

        $.fn.dataTableExt.sErrMode = 'throw';
    }
});
src/Application/GlobalBundle/Resources/public/js/initDataTable.js
$( document ).ready(function( ) {
    var $docHeight = window.innerHeight - 300;

    var table = $('table').DataTable( {
        "scrollY": $docHeight,
        "scrollX": "100%",
        "scrollCollapse": true,
        "paging": false,
        "ordering": false,
        "filter": false,
        "info": false
    } );


    new $.fn.dataTable.FixedColumns( table, {
        "leftColumns": 4
    } );
});
check for empty data set
src/Application/GlobalBundle/Resources/public/js/initDataTable.js
check for empty data set
<ide><path>rc/Application/GlobalBundle/Resources/public/js/initDataTable.js
<ide> $( document ).ready(function( ) {
<del> var $docHeight = window.innerHeight - 300;
<del>
<del> var table = $('table').DataTable( {
<del> "scrollY": $docHeight,
<del> "scrollX": "100%",
<del> "scrollCollapse": true,
<del> "paging": false,
<del> "ordering": false,
<del> "filter": false,
<del> "info": false
<del> } );
<del>
<add> if ( $('table td.last-column').html() != 'No result') {
<add> var $docHeight = window.innerHeight - 300;
<ide>
<del> new $.fn.dataTable.FixedColumns( table, {
<del> "leftColumns": 4
<del> } );
<add> var table = $('table').DataTable( {
<add> "scrollY": $docHeight,
<add> "scrollX": "100%",
<add> "scrollCollapse": true,
<add> "paging": false,
<add> "ordering": false,
<add> "filter": false,
<add> "info": false
<add> } );
<add>
<add>
<add> new $.fn.dataTable.FixedColumns( table, {
<add> "leftColumns": 4
<add> } );
<add>
<add> $.fn.dataTableExt.sErrMode = 'throw';
<add> }
<ide> });
Java
apache-2.0
6d46648ab0ab748e2e3d44a1feffd571a90345d6
0
Distrotech/gerrit,MerritCR/merrit,midnightradio/gerrit,WANdisco/gerrit,Saulis/gerrit,qtproject/qtqa-gerrit,TonyChai24/test,dwhipstock/gerrit,Saulis/gerrit,thinkernel/gerrit,hdost/gerrit,MerritCR/merrit,bpollack/gerrit,joshuawilson/merrit,GerritCodeReview/gerrit,dwhipstock/gerrit,Team-OctOS/host_gerrit,gerrit-review/gerrit,supriyantomaftuh/gerrit,thesamet/gerrit,jackminicloud/test,hdost/gerrit,qtproject/qtqa-gerrit,renchaorevee/gerrit,GerritCodeReview/gerrit,hdost/gerrit,WANdisco/gerrit,thinkernel/gerrit,bootstraponline-archive/gerrit-mirror,gcoders/gerrit,joshuawilson/merrit,anminhsu/gerrit,netroby/gerrit,hdost/gerrit,MerritCR/merrit,bootstraponline-archive/gerrit-mirror,joshuawilson/merrit,bootstraponline-archive/gerrit-mirror,qtproject/qtqa-gerrit,thesamet/gerrit,midnightradio/gerrit,Overruler/gerrit,quyixia/gerrit,TonyChai24/test,midnightradio/gerrit,gcoders/gerrit,netroby/gerrit,thesamet/gerrit,gcoders/gerrit,GerritCodeReview/gerrit,supriyantomaftuh/gerrit,netroby/gerrit,gracefullife/gerrit,zommarin/gerrit,Seinlin/gerrit,Overruler/gerrit,renchaorevee/gerrit,qtproject/qtqa-gerrit,basilgor/gerrit,gerrit-review/gerrit,midnightradio/gerrit,jackminicloud/test,ckamm/gerrit,bootstraponline-archive/gerrit-mirror,TonyChai24/test,jackminicloud/test,joshuawilson/merrit,TonyChai24/test,qtproject/qtqa-gerrit,thinkernel/gerrit,joshuawilson/merrit,Overruler/gerrit,gracefullife/gerrit,Saulis/gerrit,zommarin/gerrit,Saulis/gerrit,MerritCR/merrit,gerrit-review/gerrit,Overruler/gerrit,Seinlin/gerrit,MerritCR/merrit,thesamet/gerrit,ckamm/gerrit,netroby/gerrit,ckamm/gerrit,dwhipstock/gerrit,dwhipstock/gerrit,gerrit-review/gerrit,basilgor/gerrit,quyixia/gerrit,bpollack/gerrit,pkdevbox/gerrit,bpollack/gerrit,bpollack/gerrit,pkdevbox/gerrit,dwhipstock/gerrit,ckamm/gerrit,midnightradio/gerrit,MerritCR/merrit,jackminicloud/test,Team-OctOS/host_gerrit,hdost/gerrit,renchaorevee/gerrit,Distrotech/gerrit,Saulis/gerrit,gcoders/gerrit,joshuawilson/merrit,basilgor/gerrit,Distrotech/gerrit,thinkernel/gerrit,GerritCodeReview/gerrit,renchaorevee/gerrit,GerritCodeReview/gerrit,Team-OctOS/host_gerrit,anminhsu/gerrit,Saulis/gerrit,thesamet/gerrit,anminhsu/gerrit,Seinlin/gerrit,bpollack/gerrit,anminhsu/gerrit,thinkernel/gerrit,supriyantomaftuh/gerrit,basilgor/gerrit,supriyantomaftuh/gerrit,netroby/gerrit,WANdisco/gerrit,TonyChai24/test,Distrotech/gerrit,Seinlin/gerrit,renchaorevee/gerrit,gcoders/gerrit,Overruler/gerrit,pkdevbox/gerrit,anminhsu/gerrit,supriyantomaftuh/gerrit,renchaorevee/gerrit,anminhsu/gerrit,MerritCR/merrit,netroby/gerrit,gerrit-review/gerrit,basilgor/gerrit,pkdevbox/gerrit,GerritCodeReview/gerrit,Team-OctOS/host_gerrit,WANdisco/gerrit,bpollack/gerrit,gracefullife/gerrit,jackminicloud/test,Distrotech/gerrit,GerritCodeReview/gerrit,quyixia/gerrit,quyixia/gerrit,bootstraponline-archive/gerrit-mirror,quyixia/gerrit,qtproject/qtqa-gerrit,Team-OctOS/host_gerrit,thesamet/gerrit,WANdisco/gerrit,Seinlin/gerrit,WANdisco/gerrit,ckamm/gerrit,pkdevbox/gerrit,dwhipstock/gerrit,thinkernel/gerrit,jackminicloud/test,Seinlin/gerrit,MerritCR/merrit,hdost/gerrit,gracefullife/gerrit,quyixia/gerrit,pkdevbox/gerrit,Seinlin/gerrit,dwhipstock/gerrit,jackminicloud/test,gracefullife/gerrit,thinkernel/gerrit,supriyantomaftuh/gerrit,TonyChai24/test,renchaorevee/gerrit,supriyantomaftuh/gerrit,hdost/gerrit,Distrotech/gerrit,TonyChai24/test,anminhsu/gerrit,zommarin/gerrit,netroby/gerrit,pkdevbox/gerrit,qtproject/qtqa-gerrit,bootstraponline-archive/gerrit-mirror,Distrotech/gerrit,WANdisco/gerrit,gerrit-review/gerrit,Team-OctOS/host_gerrit,m
idnightradio/gerrit,gerrit-review/gerrit,joshuawilson/merrit,gcoders/gerrit,gcoders/gerrit,joshuawilson/merrit,quyixia/gerrit,Team-OctOS/host_gerrit,zommarin/gerrit,Overruler/gerrit,thesamet/gerrit,zommarin/gerrit,GerritCodeReview/gerrit
// Copyright (C) 2009 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package com.google.gerrit.sshd.commands;

import com.google.gerrit.common.data.GlobalCapability;
import com.google.gerrit.common.errors.PermissionDeniedException;
import com.google.gerrit.extensions.annotations.RequiresCapability;
import com.google.gerrit.server.IdentifiedUser;
import com.google.gerrit.sshd.AdminHighPriorityCommand;
import com.google.gerrit.sshd.CommandMetaData;
import com.google.gerrit.sshd.SshCommand;
import com.google.inject.Inject;

import org.kohsuke.args4j.Option;

/** Opens a query processor. */
@AdminHighPriorityCommand
@RequiresCapability(GlobalCapability.ACCESS_DATABASE)
@CommandMetaData(name = "gsql", descr = "Administrative interface to active database")
final class AdminQueryShell extends SshCommand {
  @Inject
  private QueryShell.Factory factory;

  @Inject
  private IdentifiedUser currentUser;

  @Option(name = "--format", usage = "Set output format")
  private QueryShell.OutputFormat format = QueryShell.OutputFormat.PRETTY;

  @Option(name = "-c", metaVar = "SQL QUERY", usage = "Query to execute")
  private String query;

  @Override
  protected void run() throws Failure {
    try {
      checkPermission();

      final QueryShell shell = factory.create(in, out);
      shell.setOutputFormat(format);
      if (query != null) {
        shell.execute(query);
      } else {
        shell.run();
      }
    } catch (PermissionDeniedException err) {
      throw new UnloggedFailure("fatal: " + err.getMessage());
    }
  }

  /**
   * Assert that the current user is permitted to perform raw queries.
   * <p>
   * As the @RequireCapability guards at various entry points of internal
   * commands implicitly add administrators (which we want to avoid), we also
   * check permissions within QueryShell and grant access only to those who
   * canPerformRawQuery, regardless of whether they are administrators or not.
   *
   * @throws PermissionDeniedException
   */
  private void checkPermission() throws PermissionDeniedException {
    if (!currentUser.getCapabilities().canAccessDatabase()) {
      throw new PermissionDeniedException(String.format(
          "%s does not have \"Access Database\" capability.",
          currentUser.getUserName()));
    }
  }
}
gerrit-sshd/src/main/java/com/google/gerrit/sshd/commands/AdminQueryShell.java
// Copyright (C) 2009 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package com.google.gerrit.sshd.commands;

import com.google.gerrit.common.data.GlobalCapability;
import com.google.gerrit.common.errors.PermissionDeniedException;
import com.google.gerrit.extensions.annotations.RequiresCapability;
import com.google.gerrit.server.IdentifiedUser;
import com.google.gerrit.sshd.AdminHighPriorityCommand;
import com.google.gerrit.sshd.CommandMetaData;
import com.google.gerrit.sshd.SshCommand;
import com.google.inject.Inject;

import org.kohsuke.args4j.Option;

/** Opens a query processor. */
@AdminHighPriorityCommand
@RequiresCapability(GlobalCapability.ACCESS_DATABASE)
@CommandMetaData(name = "gsql", descr = "Administrative interface to active database")
final class AdminQueryShell extends SshCommand {
  @Inject
  private QueryShell.Factory factory;

  @Inject
  private IdentifiedUser currentUser;

  @Option(name = "--format", usage = "Set output format")
  private QueryShell.OutputFormat format = QueryShell.OutputFormat.PRETTY;

  @Option(name = "-c", metaVar = "SQL QUERY", usage = "Query to execute")
  private String query;

  @Override
  protected void run() throws Failure {
    try {
      checkPermission();

      final QueryShell shell = factory.create(in, out);
      shell.setOutputFormat(format);
      if (query != null) {
        shell.execute(query);
      } else {
        shell.run();
      }
    } catch (PermissionDeniedException err) {
      throw new UnloggedFailure("fatal: " + err.getMessage());
    }
  }

  /**
   * Assert that the current user is permitted to perform raw queries.
   * <p>
   * As the @RequireCapability guards at various entry points of internal
   * commands implicitly add administrators (which we want to avoid), we also
   * check permissions within QueryShell and grant access only to those who
   * canPerformRawQuery, regardless of whether they are administrators or not.
   *
   * @throws PermissionDeniedException
   */
  private void checkPermission() throws PermissionDeniedException {
    if (!currentUser.getCapabilities().canAccessDatabase()) {
      throw new PermissionDeniedException(String.format(
          "%s does not have \"Perform Raw Query\" capability.",
          currentUser.getUserName()));
    }
  }
}
Correct bad name of Access Database capability in error message The global capability required to use gsql was incorrectly named "Perform Raw Query" in the exception message (eventually presented to the user). Now using actual name "Access Database". Change-Id: Ib05b0af4dddf127d532415bf454ed76aa2ed8857
gerrit-sshd/src/main/java/com/google/gerrit/sshd/commands/AdminQueryShell.java
Correct bad name of Access Database capability in error message
<ide><path>errit-sshd/src/main/java/com/google/gerrit/sshd/commands/AdminQueryShell.java
<ide> private void checkPermission() throws PermissionDeniedException {
<ide> if (!currentUser.getCapabilities().canAccessDatabase()) {
<ide> throw new PermissionDeniedException(String.format(
<del> "%s does not have \"Perform Raw Query\" capability.",
<add> "%s does not have \"Access Database\" capability.",
<ide> currentUser.getUserName()));
<ide> }
<ide> }
Java
epl-1.0
ac932578dfa99f5d93e04c103cf60f067ef8e66f
0
dejanb/hono,dejanb/hono,dejanb/hono
/** * Copyright (c) 2016, 2018 Bosch Software Innovations GmbH. * * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Contributors: * Bosch Software Innovations GmbH - initial creation */ package org.eclipse.hono.adapter.http; import java.net.HttpURLConnection; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Optional; import org.apache.qpid.proton.message.Message; import org.eclipse.hono.client.ClientErrorException; import org.eclipse.hono.client.MessageConsumer; import org.eclipse.hono.client.MessageSender; import org.eclipse.hono.service.AbstractProtocolAdapterBase; import org.eclipse.hono.service.auth.device.Device; import org.eclipse.hono.service.command.Command; import org.eclipse.hono.service.command.CommandResponse; import org.eclipse.hono.service.command.CommandResponseSender; import org.eclipse.hono.service.http.DefaultFailureHandler; import org.eclipse.hono.service.http.HttpUtils; import org.eclipse.hono.tracing.TracingHelper; import org.eclipse.hono.util.Constants; import org.eclipse.hono.util.EventConstants; import org.eclipse.hono.util.ResourceIdentifier; import org.eclipse.hono.util.TelemetryConstants; import org.eclipse.hono.util.TenantObject; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import io.opentracing.Span; import io.opentracing.SpanContext; import io.opentracing.contrib.vertx.ext.web.TracingHandler; import io.opentracing.contrib.vertx.ext.web.WebSpanDecorator; import io.opentracing.tag.Tags; import io.vertx.core.CompositeFuture; import io.vertx.core.Future; import io.vertx.core.buffer.Buffer; import io.vertx.core.http.HttpServer; import io.vertx.core.http.HttpServerOptions; import io.vertx.core.http.HttpServerResponse; import io.vertx.core.json.JsonObject; import io.vertx.ext.web.Router; import io.vertx.ext.web.RoutingContext; import io.vertx.ext.web.handler.BodyHandler; /** * Base class for a Vert.x based Hono protocol adapter that uses the HTTP protocol. * It provides access to the Telemetry and Event API. * * @param <T> The type of configuration properties used by this service. */ public abstract class AbstractVertxBasedHttpProtocolAdapter<T extends HttpProtocolAdapterProperties> extends AbstractProtocolAdapterBase<T> { /** * Default file uploads directory used by Vert.x Web. */ protected static final String DEFAULT_UPLOADS_DIRECTORY = "/tmp"; private static final Logger LOG = LoggerFactory.getLogger(AbstractVertxBasedHttpProtocolAdapter.class); private static final int AT_LEAST_ONCE = 1; private static final int HEADER_QOS_INVALID = -1; private static final String KEY_TIMER_ID = "timerId"; private HttpServer server; private HttpServer insecureServer; private HttpAdapterMetrics metrics; /** * Sets the metrics for this service. * * @param metrics The metrics */ @Autowired public final void setMetrics(final HttpAdapterMetrics metrics) { this.metrics = metrics; } /** * @return 8443 */ @Override public final int getPortDefaultValue() { return 8443; } /** * @return 8080 */ @Override public final int getInsecurePortDefaultValue() { return 8080; } @Override protected final int getActualPort() { return server != null ? 
server.actualPort() : Constants.PORT_UNCONFIGURED; } @Override protected final int getActualInsecurePort() { return insecureServer != null ? insecureServer.actualPort() : Constants.PORT_UNCONFIGURED; } /** * Sets the http server instance configured to serve requests over a TLS secured socket. * <p> * If no server is set using this method, then a server instance is created during * startup of this adapter based on the <em>config</em> properties and the server options * returned by {@link #getHttpServerOptions()}. * * @param server The http server. * @throws NullPointerException if server is {@code null}. * @throws IllegalArgumentException if the server is already started and listening on an address/port. */ @Autowired(required = false) public final void setHttpServer(final HttpServer server) { Objects.requireNonNull(server); if (server.actualPort() > 0) { throw new IllegalArgumentException("http server must not be started already"); } else { this.server = server; } } /** * Sets the http server instance configured to serve requests over a plain socket. * <p> * If no server is set using this method, then a server instance is created during * startup of this adapter based on the <em>config</em> properties and the server options * returned by {@link #getInsecureHttpServerOptions()}. * * @param server The http server. * @throws NullPointerException if server is {@code null}. * @throws IllegalArgumentException if the server is already started and listening on an address/port. */ @Autowired(required = false) public final void setInsecureHttpServer(final HttpServer server) { Objects.requireNonNull(server); if (server.actualPort() > 0) { throw new IllegalArgumentException("http server must not be started already"); } else { this.insecureServer = server; } } @Override public final void doStart(final Future<Void> startFuture) { checkPortConfiguration() .compose(s -> preStartup()) .compose(s -> { if (metrics == null) { // use default implementation // which simply discards all reported metrics metrics = new HttpAdapterMetrics(); } final Router router = createRouter(); if (router == null) { return Future.failedFuture("no router configured"); } else { addRoutes(router); return CompositeFuture.all(bindSecureHttpServer(router), bindInsecureHttpServer(router)); } }).compose(s -> { try { onStartupSuccess(); startFuture.complete(); } catch (Exception e) { LOG.error("error in onStartupSuccess", e); startFuture.fail(e); } }, startFuture); } /** * Adds a handler for adding an OpenTracing Span to the routing context. * * @param router The router to add the handler to. * @param position The position to add the tracing handler at. */ private void addTracingHandler(final Router router, final int position) { final Map<String, String> customTags = new HashMap<>(); customTags.put(Tags.COMPONENT.getKey(), getTypeName()); addCustomTags(customTags); final List<WebSpanDecorator> decorators = new ArrayList<>(); decorators.add(new ComponentMetaDataDecorator(customTags)); addCustomSpanDecorators(decorators); final TracingHandler tracingHandler = new TracingHandler(tracer, decorators); router.route().order(position).handler(tracingHandler).failureHandler(tracingHandler); } /** * Adds meta data about this adapter to be included in OpenTracing * spans that are used for tracing requests handled by this adapter. * <p> * This method is empty by default. * * @param customTags The existing custom tags to add to. The map will already * include this adapter's {@linkplain #getTypeName() type name} * under key {@link Tags#COMPONENT}. 
*/ protected void addCustomTags(final Map<String, String> customTags) { // empty by default } /** * Adds decorators to apply to the active OpenTracing span on certain * stages of processing requests handled by this adapter. * <p> * This method is empty by default. * * @param decorators The decorators to add to. The list will already * include a {@linkplain ComponentMetaDataDecorator decorator} for * adding standard tags and component specific tags which can be customized by * means of overriding {@link #addCustomTags(Map)}. */ protected void addCustomSpanDecorators(final List<WebSpanDecorator> decorators) { // empty by default } /** * Invoked before the http server is started. * <p> * May be overridden by sub-classes to provide additional startup handling. * * @return A future indicating the outcome of the operation. The start up process fails if the returned future fails. */ protected Future<Void> preStartup() { return Future.succeededFuture(); } /** * Invoked after this adapter has started up successfully. * <p> * May be overridden by sub-classes. */ protected void onStartupSuccess() { // empty } /** * Creates the router for handling requests. * <p> * This method creates a router instance with the following routes: * <ol> * <li>A default route limiting the body size of requests to the maximum payload size set in the <em>config</em> properties.</li> * </ol> * * @return The newly created router (never {@code null}). */ protected Router createRouter() { final Router router = Router.router(vertx); LOG.info("limiting size of inbound request body to {} bytes", getConfig().getMaxPayloadSize()); router.route().handler(BodyHandler.create(DEFAULT_UPLOADS_DIRECTORY).setBodyLimit(getConfig().getMaxPayloadSize())); addTracingHandler(router, -5); // add default handler for failed routes router.route().order(-1).failureHandler(new DefaultFailureHandler()); return router; } /** * Adds custom routes for handling requests. * <p> * This method is invoked right before the http server is started with the value returned by * {@link AbstractVertxBasedHttpProtocolAdapter#createRouter()}. * * @param router The router to add the custom routes to. */ protected abstract void addRoutes(Router router); /** * Gets the options to use for creating the TLS secured http server. * <p> * Subclasses may override this method in order to customize the server. * <p> * This method returns default options with the host and port being set to the corresponding values * from the <em>config</em> properties and using a maximum chunk size of 4096 bytes. * * @return The http server options. */ protected HttpServerOptions getHttpServerOptions() { final HttpServerOptions options = new HttpServerOptions(); options.setHost(getConfig().getBindAddress()).setPort(getConfig().getPort(getPortDefaultValue())) .setMaxChunkSize(4096); addTlsKeyCertOptions(options); addTlsTrustOptions(options); return options; } /** * Gets the options to use for creating the insecure http server. * <p> * Subclasses may override this method in order to customize the server. * <p> * This method returns default options with the host and port being set to the corresponding values * from the <em>config</em> properties and using a maximum chunk size of 4096 bytes. * * @return The http server options. 
*/ protected HttpServerOptions getInsecureHttpServerOptions() { final HttpServerOptions options = new HttpServerOptions(); options.setHost(getConfig().getInsecurePortBindAddress()).setPort(getConfig().getInsecurePort(getInsecurePortDefaultValue())).setMaxChunkSize(4096); return options; } /** * Invoked before the message is sent to the downstream peer. * <p> * Subclasses may override this method in order to customize the message * before it is sent, e.g. adding custom properties. * * @param downstreamMessage The message that will be sent downstream. * @param ctx The routing context. */ protected void customizeDownstreamMessage(final Message downstreamMessage, final RoutingContext ctx) { // this default implementation does nothing } /** * Gets the authenticated device identity from the routing context. * * @param ctx The routing context. * @return The device or {@code null} if the device has not been authenticated. */ protected final Device getAuthenticatedDevice(final RoutingContext ctx) { return Optional.ofNullable(ctx.user()).map(user -> { if (Device.class.isInstance(user)) { return (Device) user; } else { return null; } }).orElse(null); } private Future<HttpServer> bindSecureHttpServer(final Router router) { if (isSecurePortEnabled()) { final Future<HttpServer> result = Future.future(); final String bindAddress = server == null ? getConfig().getBindAddress() : "?"; if (server == null) { server = vertx.createHttpServer(getHttpServerOptions()); } server.requestHandler(router::accept).listen(done -> { if (done.succeeded()) { LOG.info("secure http server listening on {}:{}", bindAddress, server.actualPort()); result.complete(done.result()); } else { LOG.error("error while starting up secure http server", done.cause()); result.fail(done.cause()); } }); return result; } else { return Future.succeededFuture(); } } private Future<HttpServer> bindInsecureHttpServer(final Router router) { if (isInsecurePortEnabled()) { final Future<HttpServer> result = Future.future(); final String bindAddress = insecureServer == null ? getConfig().getInsecurePortBindAddress() : "?"; if (insecureServer == null) { insecureServer = vertx.createHttpServer(getInsecureHttpServerOptions()); } insecureServer.requestHandler(router::accept).listen(done -> { if (done.succeeded()) { LOG.info("insecure http server listening on {}:{}", bindAddress, insecureServer.actualPort()); result.complete(done.result()); } else { LOG.error("error while starting up insecure http server", done.cause()); result.fail(done.cause()); } }); return result; } else { return Future.succeededFuture(); } } @Override public final void doStop(final Future<Void> stopFuture) { try { preShutdown(); } catch (Exception e) { LOG.error("error in preShutdown", e); } final Future<Void> serverStopTracker = Future.future(); if (server != null) { server.close(serverStopTracker.completer()); } else { serverStopTracker.complete(); } final Future<Void> insecureServerStopTracker = Future.future(); if (insecureServer != null) { insecureServer.close(insecureServerStopTracker.completer()); } else { insecureServerStopTracker.complete(); } CompositeFuture.all(serverStopTracker, insecureServerStopTracker) .compose(v -> postShutdown()) .compose(s -> stopFuture.complete(), stopFuture); } /** * Invoked before the Http server is shut down. * May be overridden by sub-classes. */ protected void preShutdown() { // empty } /** * Invoked after the Adapter has been shutdown successfully. * May be overridden by sub-classes to provide further shutdown handling. 
* * @return A future that has to be completed when this operation is finished. */ protected Future<Void> postShutdown() { return Future.succeededFuture(); } /** * Uploads the body of an HTTP request as a telemetry message to Hono. * <p> * This method simply invokes {@link #uploadTelemetryMessage(RoutingContext, String, String, Buffer, String)} * with objects retrieved from the routing context. * * @param ctx The context to retrieve the message payload and content type from. * @param tenant The tenant of the device that has produced the data. * @param deviceId The id of the device that has produced the data. * @throws NullPointerException if any of the parameters is {@code null}. */ public final void uploadTelemetryMessage(final RoutingContext ctx, final String tenant, final String deviceId) { uploadTelemetryMessage( Objects.requireNonNull(ctx), Objects.requireNonNull(tenant), Objects.requireNonNull(deviceId), ctx.getBody(), HttpUtils.getContentType(ctx)); } /** * Uploads a telemetry message to Hono. * <p> * This method always sends a response to the device. The status code will be set * as specified in the * <a href="https://www.eclipse.org/hono/user-guide/http-adapter/#publish-telemetry-data-authenticated-device"> * HTTP adapter User Guide</a>. * * @param ctx The context to retrieve cookies and the HTTP response from. * @param tenant The tenant of the device that has produced the data. * @param deviceId The id of the device that has produced the data. * @param payload The message payload to send. * @param contentType The content type of the message payload. * @throws NullPointerException if any of response, tenant or device ID is {@code null}. */ public final void uploadTelemetryMessage(final RoutingContext ctx, final String tenant, final String deviceId, final Buffer payload, final String contentType) { doUploadMessage( Objects.requireNonNull(ctx), Objects.requireNonNull(tenant), Objects.requireNonNull(deviceId), payload, contentType, getTelemetrySender(tenant), TelemetryConstants.TELEMETRY_ENDPOINT); } /** * Uploads the body of an HTTP request as an event message to Hono. * <p> * This method simply invokes {@link #uploadEventMessage(RoutingContext, String, String, Buffer, String)} * with objects retrieved from the routing context. * * @param ctx The context to retrieve the message payload and content type from. * @param tenant The tenant of the device that has produced the data. * @param deviceId The id of the device that has produced the data. * @throws NullPointerException if any of the parameters is {@code null}. */ public final void uploadEventMessage(final RoutingContext ctx, final String tenant, final String deviceId) { uploadEventMessage( Objects.requireNonNull(ctx), Objects.requireNonNull(tenant), Objects.requireNonNull(deviceId), ctx.getBody(), HttpUtils.getContentType(ctx)); } /** * Uploads an event message to Hono. * <p> * This method always sends a response to the device. The status code will be set * as specified in the * <a href="https://www.eclipse.org/hono/user-guide/http-adapter/#publish-an-event-authenticated-device"> * HTTP adapter User Guide</a>. * * @param ctx The context to retrieve cookies and the HTTP response from. * @param tenant The tenant of the device that has produced the data. * @param deviceId The id of the device that has produced the data. * @param payload The message payload to send. * @param contentType The content type of the message payload. * @throws NullPointerException if any of response, tenant or device ID is {@code null}. 
*/ public final void uploadEventMessage(final RoutingContext ctx, final String tenant, final String deviceId, final Buffer payload, final String contentType) { doUploadMessage( Objects.requireNonNull(ctx), Objects.requireNonNull(tenant), Objects.requireNonNull(deviceId), payload, contentType, getEventSender(tenant), EventConstants.EVENT_ENDPOINT); } private void doUploadMessage(final RoutingContext ctx, final String tenant, final String deviceId, final Buffer payload, final String contentType, final Future<MessageSender> senderTracker, final String endpointName) { if (!isPayloadOfIndicatedType(payload, contentType)) { HttpUtils.badRequest(ctx, String.format("content type [%s] does not match payload", contentType)); } else { final Integer qosHeader = getQoSLevel(ctx.request().getHeader(Constants.HEADER_QOS_LEVEL)); if (qosHeader != null && qosHeader == HEADER_QOS_INVALID) { HttpUtils.badRequest(ctx, "unsupported QoS-Level header value"); } else { final Future<Void> responseReady = Future.future(); final Device authenticatedDevice = getAuthenticatedDevice(ctx); final SpanContext currentSpan = Optional.ofNullable((Span) ctx.get(TracingHandler.CURRENT_SPAN)).map(span -> { span.setOperationName("upload " + endpointName); TracingHelper.TAG_TLS.set(span, ctx.request().isSSL()); TracingHelper.TAG_AUTHENTICATED.set(span, authenticatedDevice != null); return span.context(); }).orElse(null); final Future<JsonObject> tokenTracker = getRegistrationAssertion(tenant, deviceId, authenticatedDevice); final Future<TenantObject> tenantConfigTracker = getTenantConfiguration(tenant); final Future<MessageConsumer> commandConsumerTracker = createCommandConsumer(tenant, deviceId, ctx, responseReady); CompositeFuture.all(tokenTracker, tenantConfigTracker, senderTracker, commandConsumerTracker).compose(ok -> { if (tenantConfigTracker.result().isAdapterEnabled(getTypeName())) { final MessageSender sender = senderTracker.result(); final Message downstreamMessage = newMessage( ResourceIdentifier.from(endpointName, tenant, deviceId), sender.isRegistrationAssertionRequired(), ctx.request().uri(), contentType, payload, tokenTracker.result(), HttpUtils.getTimeTilDisconnect(ctx)); customizeDownstreamMessage(downstreamMessage, ctx); addConnectionCloseHandler(ctx, commandConsumerTracker.result(), tenant, deviceId); if (qosHeader == null) { return CompositeFuture.all(sender.send(downstreamMessage, currentSpan), responseReady); } else { return CompositeFuture.all(sender.sendAndWaitForOutcome(downstreamMessage, currentSpan), responseReady); } } else { // this adapter is not enabled for the tenant return Future.failedFuture(new ClientErrorException(HttpURLConnection.HTTP_FORBIDDEN, "adapter is not enabled for tenant")); } }).compose(delivery -> { if (!ctx.response().closed()) { final Command command = Command.get(ctx); setResponsePayload(ctx.response(), command); ctx.addBodyEndHandler(ok -> { LOG.trace("successfully processed [{}] message for device [tenantId: {}, deviceId: {}]", endpointName, tenant, deviceId); metrics.incrementProcessedHttpMessages(endpointName, tenant); }); ctx.response().exceptionHandler(t -> { LOG.debug("failed to send http response for [{}] message from device [tenantId: {}, deviceId: {}]", endpointName, tenant, deviceId, t); if (command != null) { final CommandResponse response = CommandResponse.from(command.getRequestId(), HttpURLConnection.HTTP_UNAVAILABLE); sendCommandResponse(tenant, deviceId, response); } }); ctx.response().end(); } return Future.succeededFuture(); }).recover(t -> { LOG.debug("cannot 
process [{}] message from device [tenantId: {}, deviceId: {}]", endpointName, tenant, deviceId, t); final Command command = Command.get(ctx); if (command != null) { final CommandResponse response = CommandResponse.from(command.getRequestId(), HttpURLConnection.HTTP_UNAVAILABLE); sendCommandResponse(tenant, deviceId, response); } if (ClientErrorException.class.isInstance(t)) { final ClientErrorException e = (ClientErrorException) t; ctx.fail(e); } else { metrics.incrementUndeliverableHttpMessages(endpointName, tenant); HttpUtils.serviceUnavailable(ctx, 2, "temporarily unavailable"); } return Future.failedFuture(t); }); } } } /** * Attach a handler that is called if a command consumer was opened and the client closes the HTTP connection before a response * with a possible command could be sent. * <p> * In this case, the handler closes the command consumer since a command could not be added to the response anymore. * The application receives an {@link HttpURLConnection#HTTP_UNAVAILABLE} if trying to send the command and can repeat * it later. * * @param ctx The context to retrieve cookies and the HTTP response from. * @param messageConsumer The message consumer to receive a command. Maybe {@code null} - in this case no handler is attached. * @param tenantId The tenant that the device belongs to. * @param deviceId The identifier of the device. */ private void addConnectionCloseHandler(final RoutingContext ctx, final MessageConsumer messageConsumer, final String tenantId, final String deviceId) { Optional.ofNullable(messageConsumer).map(consumer -> { if (!ctx.response().closed()) { ctx.response().closeHandler(v -> { cancelCommandReceptionTimer(ctx); LOG.debug("Connection was closed before response could be sent - closing command consumer for device [tenantId: {}, deviceId: {}]", tenantId, deviceId); getCommandConnection().closeCommandConsumer(tenantId, deviceId).setHandler(result -> { if (result.failed()) { LOG.warn("Close command consumer failed", result.cause()); } }); }); } return consumer; }); } private void setResponsePayload(final HttpServerResponse response, final Command command) { if (command == null) { response.setStatusCode(HttpURLConnection.HTTP_ACCEPTED); } else { LOG.trace("adding command [name: {}, request-id: {}] to response for device [tenant-id: {}, device-id: {}]", command.getName(), command.getRequestId(), command.getTenant(), command.getDeviceId()); response.setStatusCode(HttpURLConnection.HTTP_OK); response.putHeader(Constants.HEADER_COMMAND, command.getName()); response.putHeader(Constants.HEADER_COMMAND_REQUEST_ID, command.getRequestId()); HttpUtils.setResponseBody(response, command.getPayload()); } } /** * Creates a consumer for command messages to be sent to a device. * * @param tenantId The tenant that the device belongs to. * @param deviceId The identifier of the device. * @param ctx The device's currently executing HTTP request. * @param responseReady A future to complete once one of the following conditions are met: * <ul> * <li>the request did not include a <em>hono-ttd</em> parameter or</li> * <li>a command has been received and the response ready future has not yet been * completed or</li> * <li>the ttd has expired</li> * </ul> * @return A future indicating the outcome. * The future will be completed with the created message consumer or it will * be failed with a {@code ServiceInvocationException} if the consumer * could not be created. 
*/ protected final Future<MessageConsumer> createCommandConsumer( final String tenantId, final String deviceId, final RoutingContext ctx, final Future<Void> responseReady) { final long ttdMillis = Optional.ofNullable(HttpUtils.getTimeTilDisconnect(ctx)).map(ttd -> ttd * 1000L).orElse(0L); if (ttdMillis <= 0) { // no need to wait for a command responseReady.tryComplete(); return Future.succeededFuture(); } else { return getCommandConnection().getOrCreateCommandConsumer( tenantId, deviceId, createCommandMessageConsumer(tenantId, deviceId, receivedCommand -> { getCommandConnection().closeCommandConsumer(tenantId, deviceId).setHandler(v -> { if (responseReady.isComplete()) { // the timer has already fired, release the command receivedCommand.release(); } else { // put command to routing context and notify receivedCommand.put(ctx); cancelCommandReceptionTimer(ctx); responseReady.tryComplete(); } if (v.failed()) { LOG.warn("Close command consumer failed", v.cause()); } }); }), remoteDetach -> { LOG.debug("peer closed command receiver link [tenant-id: {}, device-id: {}]", tenantId, deviceId); // command consumer is closed by closeHandler, no explicit close necessary here }).map(consumer -> { consumer.flow(1); if (!responseReady.isComplete()) { // if the request was not responded already, add a timer that closes the command consumer after expiry addCommandReceptionTimer(ctx, tenantId, deviceId, responseReady, ttdMillis); } return consumer; }); } } /** * Add a timer that closes the command connection after the given expiry time in milliseconds. * In this case it additionally completes the <em>responseReady</em> Future. * <p> * The timerId is put to the routing context using the key {@link #KEY_TIMER_ID}. * * @param ctx The device's currently executing HTTP request. * @param tenantId The tenant that the device belongs to. * @param deviceId The identifier of the device. * @param responseReady A future to complete if the timer expired. * @param expiryTimeInMillis The expiry time of the timer. */ private void addCommandReceptionTimer(final RoutingContext ctx, final String tenantId, final String deviceId, final Future<Void> responseReady, final long expiryTimeInMillis) { final Long timerId = ctx.vertx().setTimer(expiryTimeInMillis, id -> { LOG.trace("Command Reception timer fired, id {}", id); if (!responseReady.isComplete()) { // if the request was responded already, responseReady.tryComplete(); getCommandConnection().closeCommandConsumer(tenantId, deviceId).setHandler(v -> { if (v.failed()) { LOG.warn("Close command consumer failed", v.cause()); } }); } else { LOG.trace("Nothing to close for timer since response was sent already"); } }); LOG.trace("Adding command reception timer id {}", timerId); ctx.put(KEY_TIMER_ID, timerId); } private void cancelCommandReceptionTimer(final RoutingContext ctx) { Optional.ofNullable(ctx.get(KEY_TIMER_ID)).map(timerId -> { if ((Long)timerId >= 0) { if (ctx.vertx().cancelTimer((Long)timerId)) { LOG.trace("Cancelled timer id {}", timerId); } else { LOG.debug("Could not cancel timer id {}", timerId); } } return timerId; }); } /** * Uploads a command response message to the Hono server. * * @param ctx The routing context of the HTTP request. * @param tenant The tenant of the device from that a command response was received. * @param deviceId The id of the device from that a command response was received. * @param commandRequestId The id of the command that is responded. * @param commandRequestStatus The status of the command that is responded by the device. 
* @throws NullPointerException if ctx, tenant or deviceId is {@code null}. * @throws IllegalArgumentException if the commandRequestId cannot be processed since it is invalid, or if the commandRequestStatus * does not contain a valid status code. */ public final void uploadCommandResponseMessage(final RoutingContext ctx, final String tenant, final String deviceId, final String commandRequestId, final Integer commandRequestStatus) { Objects.requireNonNull(ctx); Objects.requireNonNull(tenant); Objects.requireNonNull(deviceId); final Buffer payload = ctx.getBody(); final String contentType = HttpUtils.getContentType(ctx); LOG.debug("uploadCommandResponseMessage: [tenantId: {}, deviceId: {}, commandRequestId: {}, commandRequestStatus: {}]", tenant, deviceId, commandRequestId, commandRequestStatus); Optional.ofNullable(CommandResponse.from(commandRequestId, payload, contentType, commandRequestStatus)).map(commandResponse -> { // send answer to caller via sender link final Future<CommandResponseSender> responseSender = createCommandResponseSender(tenant, deviceId, commandResponse.getReplyToId()); responseSender.compose(commandResponseSender -> commandResponseSender.sendCommandResponse(commandResponse.getCorrelationId(), contentType, payload, null, commandRequestStatus) ).map(delivery -> { if (delivery.remotelySettled()) { LOG.debug("Command response [command-request-id: {}] acknowledged to sender.", commandRequestId); ctx.response().setStatusCode(HttpURLConnection.HTTP_ACCEPTED); } else { LOG.debug("Command response [command-request-id: {}] failed - not remotely settled by sender.", commandRequestId); ctx.response().setStatusCode(HttpURLConnection.HTTP_UNAVAILABLE); } responseSender.result().close(v -> { }); ctx.response().end(); return delivery; }).otherwise(t -> { LOG.debug("Command response [command-request-id: {}] failed", commandRequestId, t); Optional.ofNullable(responseSender.result()).map(r -> { r.close(v -> { }); return r; }); ctx.response().setStatusCode(HttpURLConnection.HTTP_UNAVAILABLE); ctx.response().end(); return null; }); return commandResponse; }).orElseGet(() -> { HttpUtils.badRequest(ctx, String.format("Cannot process command response message - command-request-id %s or status %s invalid", commandRequestId, commandRequestStatus)); return null; }); } private static Integer getQoSLevel(final String qosValue) { try { if (qosValue == null) { return null; } else { return Integer.parseInt(qosValue) != AT_LEAST_ONCE ? HEADER_QOS_INVALID : AT_LEAST_ONCE; } } catch (NumberFormatException e) { return HEADER_QOS_INVALID; } } }
adapters/http-vertx-base/src/main/java/org/eclipse/hono/adapter/http/AbstractVertxBasedHttpProtocolAdapter.java
/** * Copyright (c) 2016, 2018 Bosch Software Innovations GmbH. * * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Contributors: * Bosch Software Innovations GmbH - initial creation */ package org.eclipse.hono.adapter.http; import java.net.HttpURLConnection; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Optional; import org.apache.qpid.proton.message.Message; import org.eclipse.hono.client.ClientErrorException; import org.eclipse.hono.client.MessageConsumer; import org.eclipse.hono.client.MessageSender; import org.eclipse.hono.service.AbstractProtocolAdapterBase; import org.eclipse.hono.service.auth.device.Device; import org.eclipse.hono.service.command.Command; import org.eclipse.hono.service.command.CommandResponse; import org.eclipse.hono.service.command.CommandResponseSender; import org.eclipse.hono.service.http.DefaultFailureHandler; import org.eclipse.hono.service.http.HttpUtils; import org.eclipse.hono.tracing.TracingHelper; import org.eclipse.hono.util.Constants; import org.eclipse.hono.util.EventConstants; import org.eclipse.hono.util.ResourceIdentifier; import org.eclipse.hono.util.TelemetryConstants; import org.eclipse.hono.util.TenantObject; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import io.opentracing.Span; import io.opentracing.SpanContext; import io.opentracing.contrib.vertx.ext.web.TracingHandler; import io.opentracing.contrib.vertx.ext.web.WebSpanDecorator; import io.opentracing.tag.Tags; import io.vertx.core.CompositeFuture; import io.vertx.core.Future; import io.vertx.core.buffer.Buffer; import io.vertx.core.http.HttpServer; import io.vertx.core.http.HttpServerOptions; import io.vertx.core.http.HttpServerResponse; import io.vertx.core.json.JsonObject; import io.vertx.ext.web.Router; import io.vertx.ext.web.RoutingContext; import io.vertx.ext.web.handler.BodyHandler; /** * Base class for a Vert.x based Hono protocol adapter that uses the HTTP protocol. * It provides access to the Telemetry and Event API. * * @param <T> The type of configuration properties used by this service. */ public abstract class AbstractVertxBasedHttpProtocolAdapter<T extends HttpProtocolAdapterProperties> extends AbstractProtocolAdapterBase<T> { /** * Default file uploads directory used by Vert.x Web. */ protected static final String DEFAULT_UPLOADS_DIRECTORY = "/tmp"; private static final Logger LOG = LoggerFactory.getLogger(AbstractVertxBasedHttpProtocolAdapter.class); private static final int AT_LEAST_ONCE = 1; private static final int HEADER_QOS_INVALID = -1; private static final String KEY_TIMER_ID = "timerId"; private HttpServer server; private HttpServer insecureServer; private HttpAdapterMetrics metrics; /** * Sets the metrics for this service. * * @param metrics The metrics */ @Autowired public final void setMetrics(final HttpAdapterMetrics metrics) { this.metrics = metrics; } /** * @return 8443 */ @Override public final int getPortDefaultValue() { return 8443; } /** * @return 8080 */ @Override public final int getInsecurePortDefaultValue() { return 8080; } @Override protected final int getActualPort() { return server != null ? 
server.actualPort() : Constants.PORT_UNCONFIGURED; } @Override protected final int getActualInsecurePort() { return insecureServer != null ? insecureServer.actualPort() : Constants.PORT_UNCONFIGURED; } /** * Sets the http server instance configured to serve requests over a TLS secured socket. * <p> * If no server is set using this method, then a server instance is created during * startup of this adapter based on the <em>config</em> properties and the server options * returned by {@link #getHttpServerOptions()}. * * @param server The http server. * @throws NullPointerException if server is {@code null}. * @throws IllegalArgumentException if the server is already started and listening on an address/port. */ @Autowired(required = false) public final void setHttpServer(final HttpServer server) { Objects.requireNonNull(server); if (server.actualPort() > 0) { throw new IllegalArgumentException("http server must not be started already"); } else { this.server = server; } } /** * Sets the http server instance configured to serve requests over a plain socket. * <p> * If no server is set using this method, then a server instance is created during * startup of this adapter based on the <em>config</em> properties and the server options * returned by {@link #getInsecureHttpServerOptions()}. * * @param server The http server. * @throws NullPointerException if server is {@code null}. * @throws IllegalArgumentException if the server is already started and listening on an address/port. */ @Autowired(required = false) public final void setInsecureHttpServer(final HttpServer server) { Objects.requireNonNull(server); if (server.actualPort() > 0) { throw new IllegalArgumentException("http server must not be started already"); } else { this.insecureServer = server; } } @Override public final void doStart(final Future<Void> startFuture) { checkPortConfiguration() .compose(s -> preStartup()) .compose(s -> { if (metrics == null) { // use default implementation // which simply discards all reported metrics metrics = new HttpAdapterMetrics(); } final Router router = createRouter(); if (router == null) { return Future.failedFuture("no router configured"); } else { addRoutes(router); addTracingHandler(router, -5); // add default handler for failed routes router.route().order(-1).failureHandler(new DefaultFailureHandler()); return CompositeFuture.all(bindSecureHttpServer(router), bindInsecureHttpServer(router)); } }).compose(s -> { try { onStartupSuccess(); startFuture.complete(); } catch (Exception e) { LOG.error("error in onStartupSuccess", e); startFuture.fail(e); } }, startFuture); } /** * Adds a handler for adding an OpenTracing Span to the routing context. * * @param router The router to add the handler to. * @param position The position to add the tracing handler at. */ private void addTracingHandler(final Router router, final int position) { final Map<String, String> customTags = new HashMap<>(); customTags.put(Tags.COMPONENT.getKey(), getTypeName()); addCustomTags(customTags); final List<WebSpanDecorator> decorators = new ArrayList<>(); decorators.add(new ComponentMetaDataDecorator(customTags)); addCustomSpanDecorators(decorators); final TracingHandler tracingHandler = new TracingHandler(tracer, decorators); router.route().order(position).handler(tracingHandler).failureHandler(tracingHandler); } /** * Adds meta data about this adapter to be included in OpenTracing * spans that are used for tracing requests handled by this adapter. * <p> * This method is empty by default. 
* * @param customTags The existing custom tags to add to. The map will already * include this adapter's {@linkplain #getTypeName() type name} * under key {@link Tags#COMPONENT}. */ protected void addCustomTags(final Map<String, String> customTags) { // empty by default } /** * Adds decorators to apply to the active OpenTracing span on certain * stages of processing requests handled by this adapter. * <p> * This method is empty by default. * * @param decorators The decorators to add to. The list will already * include a {@linkplain ComponentMetaDataDecorator decorator} for * adding standard tags and component specific tags which can be customized by * means of overriding {@link #addCustomTags(Map)}. */ protected void addCustomSpanDecorators(final List<WebSpanDecorator> decorators) { // empty by default } /** * Invoked before the http server is started. * <p> * May be overridden by sub-classes to provide additional startup handling. * * @return A future indicating the outcome of the operation. The start up process fails if the returned future fails. */ protected Future<Void> preStartup() { return Future.succeededFuture(); } /** * Invoked after this adapter has started up successfully. * <p> * May be overridden by sub-classes. */ protected void onStartupSuccess() { // empty } /** * Creates the router for handling requests. * <p> * This method creates a router instance with the following routes: * <ol> * <li>A default route limiting the body size of requests to the maximum payload size set in the <em>config</em> properties.</li> * </ol> * * @return The newly created router (never {@code null}). */ protected Router createRouter() { final Router router = Router.router(vertx); LOG.info("limiting size of inbound request body to {} bytes", getConfig().getMaxPayloadSize()); router.route().handler(BodyHandler.create(DEFAULT_UPLOADS_DIRECTORY).setBodyLimit(getConfig().getMaxPayloadSize())); return router; } /** * Adds custom routes for handling requests. * <p> * This method is invoked right before the http server is started with the value returned by * {@link AbstractVertxBasedHttpProtocolAdapter#createRouter()}. * * @param router The router to add the custom routes to. */ protected abstract void addRoutes(Router router); /** * Gets the options to use for creating the TLS secured http server. * <p> * Subclasses may override this method in order to customize the server. * <p> * This method returns default options with the host and port being set to the corresponding values * from the <em>config</em> properties and using a maximum chunk size of 4096 bytes. * * @return The http server options. */ protected HttpServerOptions getHttpServerOptions() { final HttpServerOptions options = new HttpServerOptions(); options.setHost(getConfig().getBindAddress()).setPort(getConfig().getPort(getPortDefaultValue())) .setMaxChunkSize(4096); addTlsKeyCertOptions(options); addTlsTrustOptions(options); return options; } /** * Gets the options to use for creating the insecure http server. * <p> * Subclasses may override this method in order to customize the server. * <p> * This method returns default options with the host and port being set to the corresponding values * from the <em>config</em> properties and using a maximum chunk size of 4096 bytes. * * @return The http server options. 
*/ protected HttpServerOptions getInsecureHttpServerOptions() { final HttpServerOptions options = new HttpServerOptions(); options.setHost(getConfig().getInsecurePortBindAddress()).setPort(getConfig().getInsecurePort(getInsecurePortDefaultValue())).setMaxChunkSize(4096); return options; } /** * Invoked before the message is sent to the downstream peer. * <p> * Subclasses may override this method in order to customize the message * before it is sent, e.g. adding custom properties. * * @param downstreamMessage The message that will be sent downstream. * @param ctx The routing context. */ protected void customizeDownstreamMessage(final Message downstreamMessage, final RoutingContext ctx) { // this default implementation does nothing } /** * Gets the authenticated device identity from the routing context. * * @param ctx The routing context. * @return The device or {@code null} if the device has not been authenticated. */ protected final Device getAuthenticatedDevice(final RoutingContext ctx) { return Optional.ofNullable(ctx.user()).map(user -> { if (Device.class.isInstance(user)) { return (Device) user; } else { return null; } }).orElse(null); } private Future<HttpServer> bindSecureHttpServer(final Router router) { if (isSecurePortEnabled()) { final Future<HttpServer> result = Future.future(); final String bindAddress = server == null ? getConfig().getBindAddress() : "?"; if (server == null) { server = vertx.createHttpServer(getHttpServerOptions()); } server.requestHandler(router::accept).listen(done -> { if (done.succeeded()) { LOG.info("secure http server listening on {}:{}", bindAddress, server.actualPort()); result.complete(done.result()); } else { LOG.error("error while starting up secure http server", done.cause()); result.fail(done.cause()); } }); return result; } else { return Future.succeededFuture(); } } private Future<HttpServer> bindInsecureHttpServer(final Router router) { if (isInsecurePortEnabled()) { final Future<HttpServer> result = Future.future(); final String bindAddress = insecureServer == null ? getConfig().getInsecurePortBindAddress() : "?"; if (insecureServer == null) { insecureServer = vertx.createHttpServer(getInsecureHttpServerOptions()); } insecureServer.requestHandler(router::accept).listen(done -> { if (done.succeeded()) { LOG.info("insecure http server listening on {}:{}", bindAddress, insecureServer.actualPort()); result.complete(done.result()); } else { LOG.error("error while starting up insecure http server", done.cause()); result.fail(done.cause()); } }); return result; } else { return Future.succeededFuture(); } } @Override public final void doStop(final Future<Void> stopFuture) { try { preShutdown(); } catch (Exception e) { LOG.error("error in preShutdown", e); } final Future<Void> serverStopTracker = Future.future(); if (server != null) { server.close(serverStopTracker.completer()); } else { serverStopTracker.complete(); } final Future<Void> insecureServerStopTracker = Future.future(); if (insecureServer != null) { insecureServer.close(insecureServerStopTracker.completer()); } else { insecureServerStopTracker.complete(); } CompositeFuture.all(serverStopTracker, insecureServerStopTracker) .compose(v -> postShutdown()) .compose(s -> stopFuture.complete(), stopFuture); } /** * Invoked before the Http server is shut down. * May be overridden by sub-classes. */ protected void preShutdown() { // empty } /** * Invoked after the Adapter has been shutdown successfully. * May be overridden by sub-classes to provide further shutdown handling. 
* * @return A future that has to be completed when this operation is finished. */ protected Future<Void> postShutdown() { return Future.succeededFuture(); } /** * Uploads the body of an HTTP request as a telemetry message to Hono. * <p> * This method simply invokes {@link #uploadTelemetryMessage(RoutingContext, String, String, Buffer, String)} * with objects retrieved from the routing context. * * @param ctx The context to retrieve the message payload and content type from. * @param tenant The tenant of the device that has produced the data. * @param deviceId The id of the device that has produced the data. * @throws NullPointerException if any of the parameters is {@code null}. */ public final void uploadTelemetryMessage(final RoutingContext ctx, final String tenant, final String deviceId) { uploadTelemetryMessage( Objects.requireNonNull(ctx), Objects.requireNonNull(tenant), Objects.requireNonNull(deviceId), ctx.getBody(), HttpUtils.getContentType(ctx)); } /** * Uploads a telemetry message to Hono. * <p> * This method always sends a response to the device. The status code will be set * as specified in the * <a href="https://www.eclipse.org/hono/user-guide/http-adapter/#publish-telemetry-data-authenticated-device"> * HTTP adapter User Guide</a>. * * @param ctx The context to retrieve cookies and the HTTP response from. * @param tenant The tenant of the device that has produced the data. * @param deviceId The id of the device that has produced the data. * @param payload The message payload to send. * @param contentType The content type of the message payload. * @throws NullPointerException if any of response, tenant or device ID is {@code null}. */ public final void uploadTelemetryMessage(final RoutingContext ctx, final String tenant, final String deviceId, final Buffer payload, final String contentType) { doUploadMessage( Objects.requireNonNull(ctx), Objects.requireNonNull(tenant), Objects.requireNonNull(deviceId), payload, contentType, getTelemetrySender(tenant), TelemetryConstants.TELEMETRY_ENDPOINT); } /** * Uploads the body of an HTTP request as an event message to Hono. * <p> * This method simply invokes {@link #uploadEventMessage(RoutingContext, String, String, Buffer, String)} * with objects retrieved from the routing context. * * @param ctx The context to retrieve the message payload and content type from. * @param tenant The tenant of the device that has produced the data. * @param deviceId The id of the device that has produced the data. * @throws NullPointerException if any of the parameters is {@code null}. */ public final void uploadEventMessage(final RoutingContext ctx, final String tenant, final String deviceId) { uploadEventMessage( Objects.requireNonNull(ctx), Objects.requireNonNull(tenant), Objects.requireNonNull(deviceId), ctx.getBody(), HttpUtils.getContentType(ctx)); } /** * Uploads an event message to Hono. * <p> * This method always sends a response to the device. The status code will be set * as specified in the * <a href="https://www.eclipse.org/hono/user-guide/http-adapter/#publish-an-event-authenticated-device"> * HTTP adapter User Guide</a>. * * @param ctx The context to retrieve cookies and the HTTP response from. * @param tenant The tenant of the device that has produced the data. * @param deviceId The id of the device that has produced the data. * @param payload The message payload to send. * @param contentType The content type of the message payload. * @throws NullPointerException if any of response, tenant or device ID is {@code null}. 
*/ public final void uploadEventMessage(final RoutingContext ctx, final String tenant, final String deviceId, final Buffer payload, final String contentType) { doUploadMessage( Objects.requireNonNull(ctx), Objects.requireNonNull(tenant), Objects.requireNonNull(deviceId), payload, contentType, getEventSender(tenant), EventConstants.EVENT_ENDPOINT); } private void doUploadMessage(final RoutingContext ctx, final String tenant, final String deviceId, final Buffer payload, final String contentType, final Future<MessageSender> senderTracker, final String endpointName) { if (!isPayloadOfIndicatedType(payload, contentType)) { HttpUtils.badRequest(ctx, String.format("content type [%s] does not match payload", contentType)); } else { final Integer qosHeader = getQoSLevel(ctx.request().getHeader(Constants.HEADER_QOS_LEVEL)); if (qosHeader != null && qosHeader == HEADER_QOS_INVALID) { HttpUtils.badRequest(ctx, "unsupported QoS-Level header value"); } else { final Future<Void> responseReady = Future.future(); final Device authenticatedDevice = getAuthenticatedDevice(ctx); final SpanContext currentSpan = Optional.ofNullable((Span) ctx.get(TracingHandler.CURRENT_SPAN)).map(span -> { span.setOperationName("upload " + endpointName); TracingHelper.TAG_TLS.set(span, ctx.request().isSSL()); TracingHelper.TAG_AUTHENTICATED.set(span, authenticatedDevice != null); return span.context(); }).orElse(null); final Future<JsonObject> tokenTracker = getRegistrationAssertion(tenant, deviceId, authenticatedDevice); final Future<TenantObject> tenantConfigTracker = getTenantConfiguration(tenant); final Future<MessageConsumer> commandConsumerTracker = createCommandConsumer(tenant, deviceId, ctx, responseReady); CompositeFuture.all(tokenTracker, tenantConfigTracker, senderTracker, commandConsumerTracker).compose(ok -> { if (tenantConfigTracker.result().isAdapterEnabled(getTypeName())) { final MessageSender sender = senderTracker.result(); final Message downstreamMessage = newMessage( ResourceIdentifier.from(endpointName, tenant, deviceId), sender.isRegistrationAssertionRequired(), ctx.request().uri(), contentType, payload, tokenTracker.result(), HttpUtils.getTimeTilDisconnect(ctx)); customizeDownstreamMessage(downstreamMessage, ctx); addConnectionCloseHandler(ctx, commandConsumerTracker.result(), tenant, deviceId); if (qosHeader == null) { return CompositeFuture.all(sender.send(downstreamMessage, currentSpan), responseReady); } else { return CompositeFuture.all(sender.sendAndWaitForOutcome(downstreamMessage, currentSpan), responseReady); } } else { // this adapter is not enabled for the tenant return Future.failedFuture(new ClientErrorException(HttpURLConnection.HTTP_FORBIDDEN, "adapter is not enabled for tenant")); } }).compose(delivery -> { if (!ctx.response().closed()) { final Command command = Command.get(ctx); setResponsePayload(ctx.response(), command); ctx.response().bodyEndHandler(ok -> { LOG.trace("successfully processed [{}] message for device [tenantId: {}, deviceId: {}]", endpointName, tenant, deviceId); metrics.incrementProcessedHttpMessages(endpointName, tenant); }); ctx.response().exceptionHandler(t -> { LOG.debug("failed to send http response for [{}] message from device [tenantId: {}, deviceId: {}]", endpointName, tenant, deviceId, t); if (command != null) { final CommandResponse response = CommandResponse.from(command.getRequestId(), HttpURLConnection.HTTP_UNAVAILABLE); sendCommandResponse(tenant, deviceId, response); } }); ctx.response().end(); } return Future.succeededFuture(); }).recover(t -> { 
LOG.debug("cannot process [{}] message from device [tenantId: {}, deviceId: {}]", endpointName, tenant, deviceId, t); final Command command = Command.get(ctx); if (command != null) { final CommandResponse response = CommandResponse.from(command.getRequestId(), HttpURLConnection.HTTP_UNAVAILABLE); sendCommandResponse(tenant, deviceId, response); } if (ClientErrorException.class.isInstance(t)) { final ClientErrorException e = (ClientErrorException) t; ctx.fail(e); } else { metrics.incrementUndeliverableHttpMessages(endpointName, tenant); HttpUtils.serviceUnavailable(ctx, 2, "temporarily unavailable"); } return Future.failedFuture(t); }); } } } /** * Attach a handler that is called if a command consumer was opened and the client closes the HTTP connection before a response * with a possible command could be sent. * <p> * In this case, the handler closes the command consumer since a command could not be added to the response anymore. * The application receives an {@link HttpURLConnection#HTTP_UNAVAILABLE} if trying to send the command and can repeat * it later. * * @param ctx The context to retrieve cookies and the HTTP response from. * @param messageConsumer The message consumer to receive a command. Maybe {@code null} - in this case no handler is attached. * @param tenantId The tenant that the device belongs to. * @param deviceId The identifier of the device. */ private void addConnectionCloseHandler(final RoutingContext ctx, final MessageConsumer messageConsumer, final String tenantId, final String deviceId) { Optional.ofNullable(messageConsumer).map(consumer -> { if (!ctx.response().closed()) { ctx.response().closeHandler(v -> { cancelCommandReceptionTimer(ctx); LOG.debug("Connection was closed before response could be sent - closing command consumer for device [tenantId: {}, deviceId: {}]", tenantId, deviceId); getCommandConnection().closeCommandConsumer(tenantId, deviceId).setHandler(result -> { if (result.failed()) { LOG.warn("Close command consumer failed", result.cause()); } }); }); } return consumer; }); } private void setResponsePayload(final HttpServerResponse response, final Command command) { if (command == null) { response.setStatusCode(HttpURLConnection.HTTP_ACCEPTED); } else { LOG.trace("adding command [name: {}, request-id: {}] to response for device [tenant-id: {}, device-id: {}]", command.getName(), command.getRequestId(), command.getTenant(), command.getDeviceId()); response.setStatusCode(HttpURLConnection.HTTP_OK); response.putHeader(Constants.HEADER_COMMAND, command.getName()); response.putHeader(Constants.HEADER_COMMAND_REQUEST_ID, command.getRequestId()); HttpUtils.setResponseBody(response, command.getPayload()); } } /** * Creates a consumer for command messages to be sent to a device. * * @param tenantId The tenant that the device belongs to. * @param deviceId The identifier of the device. * @param ctx The device's currently executing HTTP request. * @param responseReady A future to complete once one of the following conditions are met: * <ul> * <li>the request did not include a <em>hono-ttd</em> parameter or</li> * <li>a command has been received and the response ready future has not yet been * completed or</li> * <li>the ttd has expired</li> * </ul> * @return A future indicating the outcome. * The future will be completed with the created message consumer or it will * be failed with a {@code ServiceInvocationException} if the consumer * could not be created. 
*/ protected final Future<MessageConsumer> createCommandConsumer( final String tenantId, final String deviceId, final RoutingContext ctx, final Future<Void> responseReady) { final long ttdMillis = Optional.ofNullable(HttpUtils.getTimeTilDisconnect(ctx)).map(ttd -> ttd * 1000L).orElse(0L); if (ttdMillis <= 0) { // no need to wait for a command responseReady.tryComplete(); return Future.succeededFuture(); } else { return getCommandConnection().getOrCreateCommandConsumer( tenantId, deviceId, createCommandMessageConsumer(tenantId, deviceId, receivedCommand -> { getCommandConnection().closeCommandConsumer(tenantId, deviceId).setHandler(v -> { if (responseReady.isComplete()) { // the timer has already fired, release the command receivedCommand.release(); } else { // put command to routing context and notify receivedCommand.put(ctx); cancelCommandReceptionTimer(ctx); responseReady.tryComplete(); } if (v.failed()) { LOG.warn("Close command consumer failed", v.cause()); } }); }), remoteDetach -> { LOG.debug("peer closed command receiver link [tenant-id: {}, device-id: {}]", tenantId, deviceId); // command consumer is closed by closeHandler, no explicit close necessary here }).map(consumer -> { consumer.flow(1); if (!responseReady.isComplete()) { // if the request was not responded already, add a timer that closes the command consumer after expiry addCommandReceptionTimer(ctx, tenantId, deviceId, responseReady, ttdMillis); } return consumer; }); } } /** * Add a timer that closes the command connection after the given expiry time in milliseconds. * In this case it additionally completes the <em>responseReady</em> Future. * <p> * The timerId is put to the routing context using the key {@link #KEY_TIMER_ID}. * * @param ctx The device's currently executing HTTP request. * @param tenantId The tenant that the device belongs to. * @param deviceId The identifier of the device. * @param responseReady A future to complete if the timer expired. * @param expiryTimeInMillis The expiry time of the timer. */ private void addCommandReceptionTimer(final RoutingContext ctx, final String tenantId, final String deviceId, final Future<Void> responseReady, final long expiryTimeInMillis) { final Long timerId = ctx.vertx().setTimer(expiryTimeInMillis, id -> { LOG.trace("Command Reception timer fired, id {}", id); if (!responseReady.isComplete()) { // if the request was responded already, responseReady.tryComplete(); getCommandConnection().closeCommandConsumer(tenantId, deviceId).setHandler(v -> { if (v.failed()) { LOG.warn("Close command consumer failed", v.cause()); } }); } else { LOG.trace("Nothing to close for timer since response was sent already"); } }); LOG.trace("Adding command reception timer id {}", timerId); ctx.put(KEY_TIMER_ID, timerId); } private void cancelCommandReceptionTimer(final RoutingContext ctx) { Optional.ofNullable(ctx.get(KEY_TIMER_ID)).map(timerId -> { if ((Long)timerId >= 0) { if (ctx.vertx().cancelTimer((Long)timerId)) { LOG.trace("Cancelled timer id {}", timerId); } else { LOG.debug("Could not cancel timer id {}", timerId); } } return timerId; }); } /** * Uploads a command response message to the Hono server. * * @param ctx The routing context of the HTTP request. * @param tenant The tenant of the device from that a command response was received. * @param deviceId The id of the device from that a command response was received. * @param commandRequestId The id of the command that is responded. * @param commandRequestStatus The status of the command that is responded by the device. 
* @throws NullPointerException if ctx, tenant or deviceId is {@code null}. * @throws IllegalArgumentException if the commandRequestId cannot be processed since it is invalid, or if the commandRequestStatus * does not contain a valid status code. */ public final void uploadCommandResponseMessage(final RoutingContext ctx, final String tenant, final String deviceId, final String commandRequestId, final Integer commandRequestStatus) { Objects.requireNonNull(ctx); Objects.requireNonNull(tenant); Objects.requireNonNull(deviceId); final Buffer payload = ctx.getBody(); final String contentType = HttpUtils.getContentType(ctx); LOG.debug("uploadCommandResponseMessage: [tenantId: {}, deviceId: {}, commandRequestId: {}, commandRequestStatus: {}]", tenant, deviceId, commandRequestId, commandRequestStatus); Optional.ofNullable(CommandResponse.from(commandRequestId, payload, contentType, commandRequestStatus)).map(commandResponse -> { // send answer to caller via sender link final Future<CommandResponseSender> responseSender = createCommandResponseSender(tenant, deviceId, commandResponse.getReplyToId()); responseSender.compose(commandResponseSender -> commandResponseSender.sendCommandResponse(commandResponse.getCorrelationId(), contentType, payload, null, commandRequestStatus) ).map(delivery -> { if (delivery.remotelySettled()) { LOG.debug("Command response [command-request-id: {}] acknowledged to sender.", commandRequestId); ctx.response().setStatusCode(HttpURLConnection.HTTP_ACCEPTED); } else { LOG.debug("Command response [command-request-id: {}] failed - not remotely settled by sender.", commandRequestId); ctx.response().setStatusCode(HttpURLConnection.HTTP_UNAVAILABLE); } responseSender.result().close(v -> { }); ctx.response().end(); return delivery; }).otherwise(t -> { LOG.debug("Command response [command-request-id: {}] failed", commandRequestId, t); Optional.ofNullable(responseSender.result()).map(r -> { r.close(v -> { }); return r; }); ctx.response().setStatusCode(HttpURLConnection.HTTP_UNAVAILABLE); ctx.response().end(); return null; }); return commandResponse; }).orElseGet(() -> { HttpUtils.badRequest(ctx, String.format("Cannot process command response message - command-request-id %s or status %s invalid", commandRequestId, commandRequestStatus)); return null; }); } private static Integer getQoSLevel(final String qosValue) { try { if (qosValue == null) { return null; } else { return Integer.parseInt(qosValue) != AT_LEAST_ONCE ? HEADER_QOS_INVALID : AT_LEAST_ONCE; } } catch (NumberFormatException e) { return HEADER_QOS_INVALID; } } }
Fix tracing handler not being invoked on response body end. Signed-off-by: Kai Hudalla <[email protected]>
adapters/http-vertx-base/src/main/java/org/eclipse/hono/adapter/http/AbstractVertxBasedHttpProtocolAdapter.java
Fix tracing handler not being invoked on response body end.
<ide><path>dapters/http-vertx-base/src/main/java/org/eclipse/hono/adapter/http/AbstractVertxBasedHttpProtocolAdapter.java <ide> return Future.failedFuture("no router configured"); <ide> } else { <ide> addRoutes(router); <del> addTracingHandler(router, -5); <del> // add default handler for failed routes <del> router.route().order(-1).failureHandler(new DefaultFailureHandler()); <ide> return CompositeFuture.all(bindSecureHttpServer(router), bindInsecureHttpServer(router)); <ide> } <ide> }).compose(s -> { <ide> final Router router = Router.router(vertx); <ide> LOG.info("limiting size of inbound request body to {} bytes", getConfig().getMaxPayloadSize()); <ide> router.route().handler(BodyHandler.create(DEFAULT_UPLOADS_DIRECTORY).setBodyLimit(getConfig().getMaxPayloadSize())); <add> addTracingHandler(router, -5); <add> // add default handler for failed routes <add> router.route().order(-1).failureHandler(new DefaultFailureHandler()); <ide> <ide> return router; <ide> } <ide> if (!ctx.response().closed()) { <ide> final Command command = Command.get(ctx); <ide> setResponsePayload(ctx.response(), command); <del> ctx.response().bodyEndHandler(ok -> { <add> ctx.addBodyEndHandler(ok -> { <ide> LOG.trace("successfully processed [{}] message for device [tenantId: {}, deviceId: {}]", <ide> endpointName, tenant, deviceId); <ide> metrics.incrementProcessedHttpMessages(endpointName, tenant);
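The diff above makes two related changes: the tracing handler and the default failure handler are now registered inside createRouter() rather than in doStart(), and the per-request metrics callback is registered through RoutingContext.addBodyEndHandler() instead of being set directly on the response via HttpServerResponse.bodyEndHandler(). The commit subject points at the likely reason: the response object holds only a single body-end handler, so setting it in doUploadMessage presumably replaced the callback the OpenTracing TracingHandler uses to finish its span, whereas addBodyEndHandler() keeps a list of callbacks that are all invoked once the body has been written. Below is a minimal, self-contained sketch — not Hono code; the class name, handler bodies and port are made up — illustrating the cooperative registration style the fix switches to:

import io.vertx.core.Vertx;
import io.vertx.ext.web.Router;

public class BodyEndHandlerSketch {

    public static void main(String[] args) {
        final Vertx vertx = Vertx.vertx();
        final Router router = Router.router(vertx);

        router.route().order(-5).handler(ctx -> {
            // stand-in for the tracing handler: finish a "span" once the body is on the wire
            ctx.addBodyEndHandler(v -> System.out.println("span finished"));
            ctx.next();
        });

        router.route().handler(ctx -> {
            // cooperative registration (what the fix switches to): both callbacks run
            ctx.addBodyEndHandler(v -> System.out.println("metrics updated"));
            // calling ctx.response().bodyEndHandler(...) here instead would install a single
            // handler on the response, and the "span finished" callback above would not run
            ctx.response().end("ok");
        });

        vertx.createHttpServer().requestHandler(router::accept).listen(8088);
    }
}

With both callbacks registered via addBodyEndHandler(), every request prints both lines; replacing the second registration with a direct call on the response reproduces, in miniature, the behaviour the commit describes as the tracing handler "not being invoked on response body end".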
Java
mit
7e10f5284e5c865b605a2890e8ff98d59c42f24e
0
wizzardo/jrtorrent
package com.wizzardo.jrt; import com.wizzardo.tools.misc.DateIso8601; import com.wizzardo.tools.misc.SoftThreadLocal; import com.wizzardo.tools.security.Base64; import com.wizzardo.tools.xml.Node; import java.lang.ref.SoftReference; import java.util.ArrayList; import java.util.Date; import java.util.List; import java.util.Map; /** * Created by wizzardo on 05.10.15. */ public class XmlRpc { private static SoftThreadLocal<StringBuilder> stringBuilderThreadLocal = new SoftThreadLocal<StringBuilder>() { @Override protected StringBuilder init() { return new StringBuilder(); } @Override public StringBuilder getValue() { StringBuilder value = super.getValue(); value.setLength(0); return value; } }; String method; Params params = new Params(); public XmlRpc(String method) { this(method, new Params()); } public XmlRpc(String method, Params params) { this.method = method; this.params = params; } public XmlRpc(String method, String... params) { this.method = method; this.params = new Params(); for (String param : params) { this.params.add(param); } } String render() { Node params = new Node("params"); for (Param param : this.params.params) { params.add(param.render()); } return new Node("methodCall") .add(new Node("methodName").addText(method)) .add(params) .toXML(false, stringBuilderThreadLocal.getValue()).toString(); } interface Param { void render(Node value); default Node value() { Node value = new Node("value"); render(value); return value; } default Node render() { Node param = new Node("param"); param.add(value()); return param; } static Param from(int i) { return v -> v.add(new Node("int").addText(String.valueOf(i))); } static Param from(boolean b) { return v -> v.add(new Node("boolean").addText(b ? "1" : "0")); } static Param from(double d) { return v -> v.add(new Node("double").addText(String.valueOf(d))); } static Param from(String s) { if (s == null) return nil(); return v -> v.add(new Node("string").addText(String.valueOf(s))); } static Param from(byte[] bytes) { if (bytes == null) return nil(); return v -> v.add(new Node("base64").addText(Base64.encodeToString(bytes))); } static Param from(Date date) { if (date == null) return nil(); return v -> v.add(new Node("dateTime.iso8601").addText(DateIso8601.format(date))); } static Param from(List<Param> array) { if (array == null) return nil(); return v -> { Node data = new Node("data"); v.add(new Node("array").add(data)); for (Param param : array) { data.add(param.value()); } }; } static Param from(Map<String, Param> map) { if (map == null) return nil(); return v -> { Node struct = new Node("struct"); v.add(struct); for (Map.Entry<String, Param> entry : map.entrySet()) { struct.add(new Node("member") .add(new Node("name").addText(entry.getKey())) .add(entry.getValue().value()) ); } }; } static Param from(XmlRpc rpc) { if (rpc == null) return nil(); return v -> { Node struct = new Node("struct"); v.add(struct); struct.add(new Node("member") .add(new Node("name").addText("methodName")) .add(Param.from(rpc.method).value())); struct.add(new Node("member") .add(new Node("name").addText("params")) .add(Param.from(rpc.params.params).value())); }; } static Param nil() { return v -> v.add(new Node("nil")); } } static class Params { List<Param> params = new ArrayList<>(); Params add(int i) { params.add(Param.from(i)); return this; } Params add(double d) { params.add(Param.from(d)); return this; } Params add(boolean b) { params.add(Param.from(b)); return this; } Params add(String s) { s = escape(s); params.add(Param.from(s)); return this; } Params 
add(byte[] bytes) { params.add(Param.from(bytes)); return this; } Params add(Date date) { params.add(Param.from(date)); return this; } Params add(Params params) { this.params.add(Param.from(params.params)); return this; } Params add(XmlRpc xmlRpc) { this.params.add(Param.from(xmlRpc)); return this; } } static String escape(String s) { return s .replace("&", "&amp;") .replace("\"", "&quot;") .replace("'", "&apos") .replace("<", "&lt;") .replace(">", "&gt;") ; } }
src/main/java/com/wizzardo/jrt/XmlRpc.java
package com.wizzardo.jrt; import com.wizzardo.tools.misc.DateIso8601; import com.wizzardo.tools.misc.SoftThreadLocal; import com.wizzardo.tools.security.Base64; import com.wizzardo.tools.xml.Node; import java.lang.ref.SoftReference; import java.util.ArrayList; import java.util.Date; import java.util.List; import java.util.Map; /** * Created by wizzardo on 05.10.15. */ public class XmlRpc { private static SoftThreadLocal<StringBuilder> stringBuilderThreadLocal = new SoftThreadLocal<StringBuilder>() { @Override protected StringBuilder init() { return new StringBuilder(); } @Override public StringBuilder getValue() { StringBuilder value = super.getValue(); value.setLength(0); return value; } }; String method; Params params = new Params(); public XmlRpc(String method) { this(method, new Params()); } public XmlRpc(String method, Params params) { this.method = method; this.params = params; } public XmlRpc(String method, String... params) { this.method = method; this.params = new Params(); for (String param : params) { this.params.add(param); } } String render() { Node params = new Node("params"); for (Param param : this.params.params) { params.add(param.render()); } return new Node("methodCall") .add(new Node("methodName").addText(method)) .add(params) .toXML(false, stringBuilderThreadLocal.getValue()).toString(); } interface Param { void render(Node value); default Node value() { Node value = new Node("value"); render(value); return value; } default Node render() { Node param = new Node("param"); param.add(value()); return param; } static Param from(int i) { return v -> v.add(new Node("int").addText(String.valueOf(i))); } static Param from(boolean b) { return v -> v.add(new Node("boolean").addText(b ? "1" : "0")); } static Param from(double d) { return v -> v.add(new Node("double").addText(String.valueOf(d))); } static Param from(String s) { if (s == null) return nil(); return v -> v.add(new Node("string").addText(String.valueOf(s))); } static Param from(byte[] bytes) { if (bytes == null) return nil(); return v -> v.add(new Node("base64").addText(Base64.encodeToString(bytes))); } static Param from(Date date) { if (date == null) return nil(); return v -> v.add(new Node("dateTime.iso8601").addText(DateIso8601.format(date))); } static Param from(List<Param> array) { if (array == null) return nil(); return v -> { Node data = new Node("data"); v.add(new Node("array").add(data)); for (Param param : array) { data.add(param.value()); } }; } static Param from(Map<String, Param> map) { if (map == null) return nil(); return v -> { Node struct = new Node("struct"); v.add(struct); for (Map.Entry<String, Param> entry : map.entrySet()) { struct.add(new Node("member") .add(new Node("name").addText(entry.getKey())) .add(entry.getValue().value()) ); } }; } static Param from(XmlRpc rpc) { if (rpc == null) return nil(); return v -> { Node struct = new Node("struct"); v.add(struct); struct.add(new Node("member") .add(new Node("name").addText("methodName")) .add(Param.from(rpc.method).value())); struct.add(new Node("member") .add(new Node("name").addText("params")) .add(Param.from(rpc.params.params).value())); }; } static Param nil() { return v -> v.add(new Node("nil")); } } static class Params { List<Param> params = new ArrayList<>(); Params add(int i) { params.add(Param.from(i)); return this; } Params add(double d) { params.add(Param.from(d)); return this; } Params add(boolean b) { params.add(Param.from(b)); return this; } Params add(String s) { params.add(Param.from(s)); return this; } Params add(byte[] bytes) { 
params.add(Param.from(bytes)); return this; } Params add(Date date) { params.add(Param.from(date)); return this; } Params add(Params params) { this.params.add(Param.from(params.params)); return this; } Params add(XmlRpc xmlRpc) { this.params.add(Param.from(xmlRpc)); return this; } } }
add escape for strings
src/main/java/com/wizzardo/jrt/XmlRpc.java
add escape for strings
<ide><path>rc/main/java/com/wizzardo/jrt/XmlRpc.java <ide> } <ide> <ide> Params add(String s) { <add> s = escape(s); <ide> params.add(Param.from(s)); <ide> return this; <ide> } <ide> } <ide> } <ide> <add> <add> static String escape(String s) { <add> return s <add> .replace("&", "&amp;") <add> .replace("\"", "&quot;") <add> .replace("'", "&apos") <add> .replace("<", "&lt;") <add> .replace(">", "&gt;") <add> ; <add> } <ide> }
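The new escape(String) helper is applied to every string parameter before it is embedded in the XML-RPC payload, mapping the five characters that are special in XML to entity references. One detail worth flagging: the patch maps the apostrophe to "&apos" without a trailing semicolon, which is not a well-formed entity reference. The sketch below (hypothetical class name, not part of the repository) uses the complete "&apos;" form and shows the expected input and output:

public class XmlEscapeSketch {

    // same substitutions as the patch, with a well-formed apostrophe entity
    static String escape(String s) {
        return s
                .replace("&", "&amp;")   // must run first so the ampersands introduced below are not escaped again
                .replace("\"", "&quot;")
                .replace("'", "&apos;")
                .replace("<", "&lt;")
                .replace(">", "&gt;");
    }

    public static void main(String[] args) {
        // prints: Tom &amp; Jerry&apos;s &lt;final&gt; &quot;cut&quot;
        System.out.println(escape("Tom & Jerry's <final> \"cut\""));
    }
}

Ordering matters here: replacing "&" last instead of first would corrupt the entities produced by the earlier substitutions (for example "&lt;" would become "&amp;lt;"), which is presumably why the patch performs the ampersand replacement first.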
Java
apache-2.0
742460b3c581cbc0f59eb0d21f192deb61826a74
0
Alan502/TagRelatedness
package edu.macalester.tagrelatedness; import edu.cmu.lti.lexical_db.ILexicalDatabase; import edu.cmu.lti.lexical_db.NictWordNet; import edu.cmu.lti.ws4j.RelatednessCalculator; import edu.cmu.lti.ws4j.WS4J; import edu.cmu.lti.ws4j.impl.JiangConrath; import edu.cmu.lti.ws4j.util.WS4JConfiguration; import org.apache.commons.cli.*; import java.io.File; import java.io.IOException; import java.nio.charset.Charset; import java.nio.file.Files; import java.nio.file.Paths; import java.util.ArrayList; import java.util.Arrays; /** * Created by alan on 6/9/14. */ public class CalculateCorrelation { public static void main(String[] args){ CommandLineParser parser = new PosixParser(); Options options = new Options(); options.addOption(OptionBuilder.withLongOpt("input-file") .withDescription("Input file to calculate the correlation of") .hasArg() .withArgName("FILE") .create()); options.addOption(OptionBuilder.withLongOpt("measure") .withDescription("wordnet") .hasArg() .withArgName("NAME") .create()); HelpFormatter formatter = new HelpFormatter(); try { File input = null; CommandLine line = parser.parse(options, args); if(!options.hasOption("input-file")){ System.out.println("An input file needs to be specified."); formatter.printHelp("Correlation calculator", options); System.exit(1); }else{ input = new File(line.getOptionValue("input-file")); } // If other correlation measures are needed, identify them here with line.getOption("measure") assert null != input; tauBetweenCSVandWordnet(input); } catch (ParseException e) { e.printStackTrace(); } } public static void tauBetweenCSVandWordnet(File file, int threads){ long start = System.nanoTime(); final ArrayList<Double> measurementSimilarities = new ArrayList<Double>(); final ArrayList<Double> wordnetSimilarities = new ArrayList<Double>(); java.util.List<String> lines = null; try { lines = Files.readAllLines(Paths.get(file.getAbsolutePath()), Charset.defaultCharset()); } catch (IOException e) { e.printStackTrace(); } System.out.println("Similarities to add: "+lines.size()); ParallelForEach.loop(lines.subList(0, lines.size()), threads, new Procedure<String>() { public void call(String line){ String[] column = line.split(","); String word1 = column[0].replace("\"", "").replace(" ", ""); String word2 = column[1].replace("\"", "").replace(" ", ""); double jc = WS4J.runJCN(word1, word2); double cc = Double.parseDouble(column[2]); if(jc != 0){ // check that wordnet does have a result for this word pair synchronized (measurementSimilarities) { measurementSimilarities.add(cc); wordnetSimilarities.add(jc); } } } }); System.out.println("Tau: "+KendallsCorrelation.correlation(measurementSimilarities, wordnetSimilarities)); } public static void tauBetweenCSVandWordnet(File file){ tauBetweenCSVandWordnet(file, Runtime.getRuntime().availableProcessors()); } }
src/edu/macalester/tagrelatedness/CalculateCorrelation.java
package edu.macalester.tagrelatedness; import edu.cmu.lti.lexical_db.ILexicalDatabase; import edu.cmu.lti.lexical_db.NictWordNet; import edu.cmu.lti.ws4j.RelatednessCalculator; import edu.cmu.lti.ws4j.WS4J; import edu.cmu.lti.ws4j.impl.JiangConrath; import edu.cmu.lti.ws4j.util.WS4JConfiguration; import org.apache.commons.cli.*; import java.io.File; import java.io.IOException; import java.nio.charset.Charset; import java.nio.file.Files; import java.nio.file.Paths; import java.util.ArrayList; import java.util.Arrays; /** * Created by alan on 6/9/14. */ public class CalculateCorrelation { public static void main(String[] args){ CommandLineParser parser = new PosixParser(); Options options = new Options(); options.addOption(OptionBuilder.withLongOpt("--input-file") .withDescription("Input file to calculate the correlation of") .hasArg() .withArgName("FILE") .create()); options.addOption(OptionBuilder.withLongOpt("--measure") .withDescription("wordnet") .hasArg() .withArgName("NAME") .create()); HelpFormatter formatter = new HelpFormatter(); try { File input = null; CommandLine line = parser.parse(options, args); if(!options.hasOption("input-file")){ System.out.println("An input file needs to be specified."); formatter.printHelp("Correlation calculator", options); System.exit(1); }else{ input = new File(line.getOptionValue("input-file")); } // If other correlation measures are needed, identify them here with line.getOption("measure") assert null != input; tauBetweenCSVandWordnet(input); } catch (ParseException e) { e.printStackTrace(); } } public static void tauBetweenCSVandWordnet(File file, int threads){ long start = System.nanoTime(); final ArrayList<Double> measurementSimilarities = new ArrayList<Double>(); final ArrayList<Double> wordnetSimilarities = new ArrayList<Double>(); java.util.List<String> lines = null; try { lines = Files.readAllLines(Paths.get(file.getAbsolutePath()), Charset.defaultCharset()); } catch (IOException e) { e.printStackTrace(); } System.out.println("Similarities to add: "+lines.size()); ParallelForEach.loop(lines.subList(0, lines.size()), threads, new Procedure<String>() { public void call(String line){ String[] column = line.split(","); String word1 = column[0].replace("\"", "").replace(" ", ""); String word2 = column[1].replace("\"", "").replace(" ", ""); double jc = WS4J.runJCN(word1, word2); double cc = Double.parseDouble(column[2]); if(jc != 0){ // check that wordnet does have a result for this word pair synchronized (measurementSimilarities) { measurementSimilarities.add(cc); wordnetSimilarities.add(jc); } } } }); System.out.println("Tau: "+KendallsCorrelation.correlation(measurementSimilarities, wordnetSimilarities)); } public static void tauBetweenCSVandWordnet(File file){ tauBetweenCSVandWordnet(file, Runtime.getRuntime().availableProcessors()); } }
fixed bug with cmd
src/edu/macalester/tagrelatedness/CalculateCorrelation.java
fixed bug with cmd
<ide><path>src/edu/macalester/tagrelatedness/CalculateCorrelation.java <ide> public static void main(String[] args){ <ide> CommandLineParser parser = new PosixParser(); <ide> Options options = new Options(); <del> options.addOption(OptionBuilder.withLongOpt("--input-file") <add> options.addOption(OptionBuilder.withLongOpt("input-file") <ide> .withDescription("Input file to calculate the correlation of") <ide> .hasArg() <ide> .withArgName("FILE") <ide> .create()); <del> options.addOption(OptionBuilder.withLongOpt("--measure") <add> options.addOption(OptionBuilder.withLongOpt("measure") <ide> .withDescription("wordnet") <ide> .hasArg() <ide> .withArgName("NAME")
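The commit above fixes how long options are declared with Apache Commons CLI: OptionBuilder.withLongOpt() takes the bare option name ("input-file"), while the "--" prefix is only ever typed on the command line. A short, runnable sketch of that usage follows; the option name and the CliDemo class are hypothetical. Note also that whether an option was actually supplied is checked on the parsed CommandLine (line.hasOption), not on the Options definition object.

// Sketch of Commons CLI long-option declaration and lookup (names hypothetical).
import org.apache.commons.cli.*;

public class CliDemo {
    public static void main(String[] args) throws ParseException {
        Options options = new Options();
        // The bare long name goes here; users pass --input-file on the command line.
        options.addOption(OptionBuilder.withLongOpt("input-file")
                .withDescription("Input file to process")
                .hasArg()
                .withArgName("FILE")
                .create());

        CommandLine line = new PosixParser().parse(options, args);

        // Presence must be tested on the parsed CommandLine, not on the Options object.
        if (!line.hasOption("input-file")) {
            new HelpFormatter().printHelp("CliDemo", options);
            return;
        }
        System.out.println("Input: " + line.getOptionValue("input-file"));
    }
}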
Java
apache-2.0
5074d55be14c4d4d88313d66ba8e6677433d4100
0
tmpgit/intellij-community,ernestp/consulo,diorcety/intellij-community,semonte/intellij-community,semonte/intellij-community,ibinti/intellij-community,ryano144/intellij-community,nicolargo/intellij-community,fitermay/intellij-community,tmpgit/intellij-community,pwoodworth/intellij-community,ryano144/intellij-community,vladmm/intellij-community,fengbaicanhe/intellij-community,jexp/idea2,akosyakov/intellij-community,orekyuu/intellij-community,pwoodworth/intellij-community,wreckJ/intellij-community,samthor/intellij-community,nicolargo/intellij-community,ivan-fedorov/intellij-community,orekyuu/intellij-community,blademainer/intellij-community,xfournet/intellij-community,nicolargo/intellij-community,slisson/intellij-community,akosyakov/intellij-community,vvv1559/intellij-community,michaelgallacher/intellij-community,jexp/idea2,petteyg/intellij-community,SerCeMan/intellij-community,michaelgallacher/intellij-community,amith01994/intellij-community,slisson/intellij-community,pwoodworth/intellij-community,izonder/intellij-community,idea4bsd/idea4bsd,da1z/intellij-community,da1z/intellij-community,fitermay/intellij-community,youdonghai/intellij-community,kool79/intellij-community,pwoodworth/intellij-community,mglukhikh/intellij-community,tmpgit/intellij-community,suncycheng/intellij-community,blademainer/intellij-community,jexp/idea2,idea4bsd/idea4bsd,fnouama/intellij-community,michaelgallacher/intellij-community,ahb0327/intellij-community,apixandru/intellij-community,izonder/intellij-community,allotria/intellij-community,ol-loginov/intellij-community,slisson/intellij-community,petteyg/intellij-community,kool79/intellij-community,ahb0327/intellij-community,dslomov/intellij-community,nicolargo/intellij-community,da1z/intellij-community,ftomassetti/intellij-community,lucafavatella/intellij-community,gnuhub/intellij-community,Lekanich/intellij-community,retomerz/intellij-community,TangHao1987/intellij-community,joewalnes/idea-community,tmpgit/intellij-community,MichaelNedzelsky/intellij-community,diorcety/intellij-community,amith01994/intellij-community,suncycheng/intellij-community,salguarnieri/intellij-community,Distrotech/intellij-community,diorcety/intellij-community,izonder/intellij-community,lucafavatella/intellij-community,ivan-fedorov/intellij-community,slisson/intellij-community,apixandru/intellij-community,retomerz/intellij-community,Lekanich/intellij-community,mglukhikh/intellij-community,apixandru/intellij-community,retomerz/intellij-community,asedunov/intellij-community,ftomassetti/intellij-community,muntasirsyed/intellij-community,lucafavatella/intellij-community,signed/intellij-community,diorcety/intellij-community,ernestp/consulo,da1z/intellij-community,caot/intellij-community,fengbaicanhe/intellij-community,signed/intellij-community,xfournet/intellij-community,salguarnieri/intellij-community,wreckJ/intellij-community,wreckJ/intellij-community,mglukhikh/intellij-community,holmes/intellij-community,youdonghai/intellij-community,fitermay/intellij-community,da1z/intellij-community,youdonghai/intellij-community,dslomov/intellij-community,izonder/intellij-community,ibinti/intellij-community,joewalnes/idea-community,jagguli/intellij-community,akosyakov/intellij-community,alphafoobar/intellij-community,kool79/intellij-community,robovm/robovm-studio,slisson/intellij-community,Distrotech/intellij-community,supersven/intellij-community,joewalnes/idea-community,hurricup/intellij-community,hurricup/intellij-community,mglukhikh/intellij-community,semonte/intellij-community,adedayo/intellij-community,s
igned/intellij-community,adedayo/intellij-community,caot/intellij-community,joewalnes/idea-community,TangHao1987/intellij-community,MER-GROUP/intellij-community,SerCeMan/intellij-community,Lekanich/intellij-community,samthor/intellij-community,vladmm/intellij-community,dslomov/intellij-community,ibinti/intellij-community,salguarnieri/intellij-community,amith01994/intellij-community,holmes/intellij-community,ftomassetti/intellij-community,jagguli/intellij-community,robovm/robovm-studio,kool79/intellij-community,FHannes/intellij-community,asedunov/intellij-community,tmpgit/intellij-community,signed/intellij-community,allotria/intellij-community,consulo/consulo,supersven/intellij-community,ol-loginov/intellij-community,kdwink/intellij-community,FHannes/intellij-community,semonte/intellij-community,holmes/intellij-community,kool79/intellij-community,ftomassetti/intellij-community,ahb0327/intellij-community,amith01994/intellij-community,allotria/intellij-community,holmes/intellij-community,FHannes/intellij-community,TangHao1987/intellij-community,caot/intellij-community,mglukhikh/intellij-community,mglukhikh/intellij-community,gnuhub/intellij-community,petteyg/intellij-community,caot/intellij-community,nicolargo/intellij-community,tmpgit/intellij-community,tmpgit/intellij-community,retomerz/intellij-community,kool79/intellij-community,wreckJ/intellij-community,da1z/intellij-community,ahb0327/intellij-community,vvv1559/intellij-community,allotria/intellij-community,salguarnieri/intellij-community,fitermay/intellij-community,dslomov/intellij-community,michaelgallacher/intellij-community,salguarnieri/intellij-community,izonder/intellij-community,MER-GROUP/intellij-community,joewalnes/idea-community,SerCeMan/intellij-community,izonder/intellij-community,tmpgit/intellij-community,salguarnieri/intellij-community,pwoodworth/intellij-community,petteyg/intellij-community,retomerz/intellij-community,blademainer/intellij-community,vladmm/intellij-community,clumsy/intellij-community,adedayo/intellij-community,ol-loginov/intellij-community,samthor/intellij-community,ahb0327/intellij-community,xfournet/intellij-community,idea4bsd/idea4bsd,robovm/robovm-studio,wreckJ/intellij-community,youdonghai/intellij-community,apixandru/intellij-community,fnouama/intellij-community,suncycheng/intellij-community,fnouama/intellij-community,signed/intellij-community,FHannes/intellij-community,muntasirsyed/intellij-community,MichaelNedzelsky/intellij-community,MichaelNedzelsky/intellij-community,kdwink/intellij-community,ol-loginov/intellij-community,kool79/intellij-community,ibinti/intellij-community,diorcety/intellij-community,ahb0327/intellij-community,supersven/intellij-community,wreckJ/intellij-community,consulo/consulo,hurricup/intellij-community,jexp/idea2,robovm/robovm-studio,suncycheng/intellij-community,SerCeMan/intellij-community,retomerz/intellij-community,jagguli/intellij-community,vladmm/intellij-community,jexp/idea2,ernestp/consulo,kdwink/intellij-community,supersven/intellij-community,ernestp/consulo,supersven/intellij-community,apixandru/intellij-community,ThiagoGarciaAlves/intellij-community,idea4bsd/idea4bsd,blademainer/intellij-community,samthor/intellij-community,clumsy/intellij-community,ryano144/intellij-community,mglukhikh/intellij-community,muntasirsyed/intellij-community,mglukhikh/intellij-community,kdwink/intellij-community,muntasirsyed/intellij-community,petteyg/intellij-community,robovm/robovm-studio,vvv1559/intellij-community,lucafavatella/intellij-community,Distrotech/intellij-community,retomerz
/intellij-community,Lekanich/intellij-community,michaelgallacher/intellij-community,dslomov/intellij-community,ThiagoGarciaAlves/intellij-community,jagguli/intellij-community,adedayo/intellij-community,diorcety/intellij-community,fnouama/intellij-community,ftomassetti/intellij-community,lucafavatella/intellij-community,lucafavatella/intellij-community,Distrotech/intellij-community,MichaelNedzelsky/intellij-community,lucafavatella/intellij-community,jagguli/intellij-community,jagguli/intellij-community,youdonghai/intellij-community,ThiagoGarciaAlves/intellij-community,FHannes/intellij-community,youdonghai/intellij-community,alphafoobar/intellij-community,caot/intellij-community,fengbaicanhe/intellij-community,clumsy/intellij-community,wreckJ/intellij-community,blademainer/intellij-community,alphafoobar/intellij-community,petteyg/intellij-community,Distrotech/intellij-community,SerCeMan/intellij-community,alphafoobar/intellij-community,consulo/consulo,samthor/intellij-community,lucafavatella/intellij-community,fnouama/intellij-community,TangHao1987/intellij-community,adedayo/intellij-community,holmes/intellij-community,clumsy/intellij-community,fitermay/intellij-community,amith01994/intellij-community,holmes/intellij-community,dslomov/intellij-community,MER-GROUP/intellij-community,lucafavatella/intellij-community,SerCeMan/intellij-community,xfournet/intellij-community,vladmm/intellij-community,hurricup/intellij-community,FHannes/intellij-community,semonte/intellij-community,kool79/intellij-community,amith01994/intellij-community,wreckJ/intellij-community,xfournet/intellij-community,ryano144/intellij-community,caot/intellij-community,blademainer/intellij-community,izonder/intellij-community,semonte/intellij-community,asedunov/intellij-community,FHannes/intellij-community,MichaelNedzelsky/intellij-community,supersven/intellij-community,jexp/idea2,pwoodworth/intellij-community,SerCeMan/intellij-community,fnouama/intellij-community,retomerz/intellij-community,mglukhikh/intellij-community,michaelgallacher/intellij-community,mglukhikh/intellij-community,ahb0327/intellij-community,gnuhub/intellij-community,fnouama/intellij-community,clumsy/intellij-community,youdonghai/intellij-community,adedayo/intellij-community,apixandru/intellij-community,mglukhikh/intellij-community,ryano144/intellij-community,alphafoobar/intellij-community,ivan-fedorov/intellij-community,suncycheng/intellij-community,ahb0327/intellij-community,suncycheng/intellij-community,SerCeMan/intellij-community,semonte/intellij-community,supersven/intellij-community,ftomassetti/intellij-community,suncycheng/intellij-community,wreckJ/intellij-community,ivan-fedorov/intellij-community,fengbaicanhe/intellij-community,MichaelNedzelsky/intellij-community,xfournet/intellij-community,asedunov/intellij-community,apixandru/intellij-community,asedunov/intellij-community,ivan-fedorov/intellij-community,retomerz/intellij-community,robovm/robovm-studio,gnuhub/intellij-community,ftomassetti/intellij-community,vladmm/intellij-community,youdonghai/intellij-community,ol-loginov/intellij-community,pwoodworth/intellij-community,blademainer/intellij-community,orekyuu/intellij-community,vladmm/intellij-community,ivan-fedorov/intellij-community,clumsy/intellij-community,amith01994/intellij-community,fengbaicanhe/intellij-community,adedayo/intellij-community,youdonghai/intellij-community,alphafoobar/intellij-community,Lekanich/intellij-community,idea4bsd/idea4bsd,diorcety/intellij-community,gnuhub/intellij-community,hurricup/intellij-community,SerCeMan/intell
ij-community,ol-loginov/intellij-community,akosyakov/intellij-community,consulo/consulo,clumsy/intellij-community,michaelgallacher/intellij-community,akosyakov/intellij-community,gnuhub/intellij-community,youdonghai/intellij-community,ftomassetti/intellij-community,FHannes/intellij-community,caot/intellij-community,FHannes/intellij-community,gnuhub/intellij-community,nicolargo/intellij-community,amith01994/intellij-community,da1z/intellij-community,allotria/intellij-community,supersven/intellij-community,ryano144/intellij-community,jagguli/intellij-community,kdwink/intellij-community,hurricup/intellij-community,vvv1559/intellij-community,vvv1559/intellij-community,Lekanich/intellij-community,robovm/robovm-studio,pwoodworth/intellij-community,Lekanich/intellij-community,amith01994/intellij-community,Distrotech/intellij-community,apixandru/intellij-community,fnouama/intellij-community,fitermay/intellij-community,apixandru/intellij-community,FHannes/intellij-community,petteyg/intellij-community,Distrotech/intellij-community,suncycheng/intellij-community,TangHao1987/intellij-community,ivan-fedorov/intellij-community,ThiagoGarciaAlves/intellij-community,amith01994/intellij-community,vvv1559/intellij-community,TangHao1987/intellij-community,orekyuu/intellij-community,wreckJ/intellij-community,akosyakov/intellij-community,MER-GROUP/intellij-community,akosyakov/intellij-community,suncycheng/intellij-community,allotria/intellij-community,salguarnieri/intellij-community,ibinti/intellij-community,asedunov/intellij-community,fitermay/intellij-community,TangHao1987/intellij-community,gnuhub/intellij-community,ol-loginov/intellij-community,allotria/intellij-community,ernestp/consulo,ahb0327/intellij-community,xfournet/intellij-community,michaelgallacher/intellij-community,semonte/intellij-community,muntasirsyed/intellij-community,ivan-fedorov/intellij-community,ThiagoGarciaAlves/intellij-community,signed/intellij-community,pwoodworth/intellij-community,idea4bsd/idea4bsd,vvv1559/intellij-community,idea4bsd/idea4bsd,slisson/intellij-community,vvv1559/intellij-community,lucafavatella/intellij-community,consulo/consulo,retomerz/intellij-community,da1z/intellij-community,izonder/intellij-community,asedunov/intellij-community,fengbaicanhe/intellij-community,kdwink/intellij-community,kool79/intellij-community,alphafoobar/intellij-community,alphafoobar/intellij-community,fnouama/intellij-community,jagguli/intellij-community,dslomov/intellij-community,nicolargo/intellij-community,muntasirsyed/intellij-community,dslomov/intellij-community,da1z/intellij-community,fengbaicanhe/intellij-community,TangHao1987/intellij-community,FHannes/intellij-community,TangHao1987/intellij-community,MER-GROUP/intellij-community,diorcety/intellij-community,izonder/intellij-community,adedayo/intellij-community,salguarnieri/intellij-community,asedunov/intellij-community,orekyuu/intellij-community,TangHao1987/intellij-community,clumsy/intellij-community,kdwink/intellij-community,da1z/intellij-community,ryano144/intellij-community,da1z/intellij-community,samthor/intellij-community,ryano144/intellij-community,kdwink/intellij-community,suncycheng/intellij-community,clumsy/intellij-community,ThiagoGarciaAlves/intellij-community,MER-GROUP/intellij-community,holmes/intellij-community,ivan-fedorov/intellij-community,SerCeMan/intellij-community,signed/intellij-community,retomerz/intellij-community,orekyuu/intellij-community,xfournet/intellij-community,alphafoobar/intellij-community,fnouama/intellij-community,supersven/intellij-community,holmes/
intellij-community,SerCeMan/intellij-community,nicolargo/intellij-community,hurricup/intellij-community,diorcety/intellij-community,ThiagoGarciaAlves/intellij-community,amith01994/intellij-community,Distrotech/intellij-community,holmes/intellij-community,fengbaicanhe/intellij-community,idea4bsd/idea4bsd,idea4bsd/idea4bsd,alphafoobar/intellij-community,muntasirsyed/intellij-community,MichaelNedzelsky/intellij-community,consulo/consulo,ivan-fedorov/intellij-community,allotria/intellij-community,tmpgit/intellij-community,mglukhikh/intellij-community,retomerz/intellij-community,ThiagoGarciaAlves/intellij-community,asedunov/intellij-community,hurricup/intellij-community,samthor/intellij-community,idea4bsd/idea4bsd,Lekanich/intellij-community,blademainer/intellij-community,diorcety/intellij-community,adedayo/intellij-community,TangHao1987/intellij-community,semonte/intellij-community,tmpgit/intellij-community,vladmm/intellij-community,vvv1559/intellij-community,clumsy/intellij-community,robovm/robovm-studio,SerCeMan/intellij-community,blademainer/intellij-community,asedunov/intellij-community,slisson/intellij-community,ftomassetti/intellij-community,asedunov/intellij-community,nicolargo/intellij-community,adedayo/intellij-community,petteyg/intellij-community,fengbaicanhe/intellij-community,ftomassetti/intellij-community,adedayo/intellij-community,caot/intellij-community,fengbaicanhe/intellij-community,da1z/intellij-community,petteyg/intellij-community,allotria/intellij-community,apixandru/intellij-community,supersven/intellij-community,orekyuu/intellij-community,apixandru/intellij-community,dslomov/intellij-community,joewalnes/idea-community,youdonghai/intellij-community,lucafavatella/intellij-community,lucafavatella/intellij-community,hurricup/intellij-community,mglukhikh/intellij-community,muntasirsyed/intellij-community,fitermay/intellij-community,ol-loginov/intellij-community,ibinti/intellij-community,ivan-fedorov/intellij-community,akosyakov/intellij-community,orekyuu/intellij-community,amith01994/intellij-community,joewalnes/idea-community,orekyuu/intellij-community,fitermay/intellij-community,akosyakov/intellij-community,kdwink/intellij-community,signed/intellij-community,vvv1559/intellij-community,ahb0327/intellij-community,kool79/intellij-community,gnuhub/intellij-community,ol-loginov/intellij-community,youdonghai/intellij-community,gnuhub/intellij-community,joewalnes/idea-community,salguarnieri/intellij-community,izonder/intellij-community,dslomov/intellij-community,clumsy/intellij-community,ernestp/consulo,fitermay/intellij-community,hurricup/intellij-community,ryano144/intellij-community,orekyuu/intellij-community,Distrotech/intellij-community,izonder/intellij-community,ryano144/intellij-community,slisson/intellij-community,orekyuu/intellij-community,akosyakov/intellij-community,kdwink/intellij-community,fitermay/intellij-community,samthor/intellij-community,izonder/intellij-community,vvv1559/intellij-community,robovm/robovm-studio,idea4bsd/idea4bsd,vvv1559/intellij-community,nicolargo/intellij-community,alphafoobar/intellij-community,jagguli/intellij-community,semonte/intellij-community,hurricup/intellij-community,akosyakov/intellij-community,MER-GROUP/intellij-community,TangHao1987/intellij-community,fengbaicanhe/intellij-community,muntasirsyed/intellij-community,ahb0327/intellij-community,ThiagoGarciaAlves/intellij-community,orekyuu/intellij-community,xfournet/intellij-community,blademainer/intellij-community,wreckJ/intellij-community,tmpgit/intellij-community,alphafoobar/intellij-
community,retomerz/intellij-community,ibinti/intellij-community,ol-loginov/intellij-community,signed/intellij-community,allotria/intellij-community,MichaelNedzelsky/intellij-community,Distrotech/intellij-community,Lekanich/intellij-community,ibinti/intellij-community,caot/intellij-community,ahb0327/intellij-community,tmpgit/intellij-community,lucafavatella/intellij-community,MER-GROUP/intellij-community,idea4bsd/idea4bsd,xfournet/intellij-community,ibinti/intellij-community,pwoodworth/intellij-community,samthor/intellij-community,ThiagoGarciaAlves/intellij-community,signed/intellij-community,fnouama/intellij-community,jexp/idea2,clumsy/intellij-community,signed/intellij-community,blademainer/intellij-community,fnouama/intellij-community,nicolargo/intellij-community,michaelgallacher/intellij-community,idea4bsd/idea4bsd,semonte/intellij-community,salguarnieri/intellij-community,semonte/intellij-community,holmes/intellij-community,MER-GROUP/intellij-community,pwoodworth/intellij-community,signed/intellij-community,ibinti/intellij-community,Lekanich/intellij-community,jagguli/intellij-community,vladmm/intellij-community,diorcety/intellij-community,ibinti/intellij-community,supersven/intellij-community,allotria/intellij-community,asedunov/intellij-community,samthor/intellij-community,salguarnieri/intellij-community,muntasirsyed/intellij-community,semonte/intellij-community,ivan-fedorov/intellij-community,hurricup/intellij-community,vladmm/intellij-community,signed/intellij-community,MichaelNedzelsky/intellij-community,ol-loginov/intellij-community,slisson/intellij-community,xfournet/intellij-community,ftomassetti/intellij-community,blademainer/intellij-community,xfournet/intellij-community,ryano144/intellij-community,diorcety/intellij-community,ibinti/intellij-community,fitermay/intellij-community,FHannes/intellij-community,Distrotech/intellij-community,FHannes/intellij-community,youdonghai/intellij-community,MichaelNedzelsky/intellij-community,apixandru/intellij-community,kool79/intellij-community,caot/intellij-community,xfournet/intellij-community,salguarnieri/intellij-community,vladmm/intellij-community,robovm/robovm-studio,Distrotech/intellij-community,slisson/intellij-community,gnuhub/intellij-community,kdwink/intellij-community,apixandru/intellij-community,vladmm/intellij-community,petteyg/intellij-community,suncycheng/intellij-community,pwoodworth/intellij-community,adedayo/intellij-community,robovm/robovm-studio,MichaelNedzelsky/intellij-community,Lekanich/intellij-community,dslomov/intellij-community,MER-GROUP/intellij-community,supersven/intellij-community,nicolargo/intellij-community,muntasirsyed/intellij-community,holmes/intellij-community,samthor/intellij-community,kool79/intellij-community,jagguli/intellij-community,gnuhub/intellij-community,ryano144/intellij-community,joewalnes/idea-community,petteyg/intellij-community,petteyg/intellij-community,MichaelNedzelsky/intellij-community,samthor/intellij-community,michaelgallacher/intellij-community,apixandru/intellij-community,akosyakov/intellij-community,allotria/intellij-community,slisson/intellij-community,ftomassetti/intellij-community,holmes/intellij-community,ol-loginov/intellij-community,muntasirsyed/intellij-community,hurricup/intellij-community,wreckJ/intellij-community,kdwink/intellij-community,MER-GROUP/intellij-community,ThiagoGarciaAlves/intellij-community,ibinti/intellij-community,jexp/idea2,jagguli/intellij-community,robovm/robovm-studio,fitermay/intellij-community,ThiagoGarciaAlves/intellij-community,michaelgallacher/int
ellij-community,dslomov/intellij-community,vvv1559/intellij-community,suncycheng/intellij-community,da1z/intellij-community,michaelgallacher/intellij-community,caot/intellij-community,fengbaicanhe/intellij-community,slisson/intellij-community,MER-GROUP/intellij-community,caot/intellij-community,asedunov/intellij-community,allotria/intellij-community,Lekanich/intellij-community
/* * Copyright (c) 2000-2007 JetBrains s.r.o. All Rights Reserved. */ package com.intellij.openapi.deployment; import com.intellij.openapi.roots.libraries.Library; import com.intellij.openapi.roots.libraries.LibraryTable; import com.intellij.openapi.roots.OrderRootType; import com.intellij.openapi.roots.impl.libraries.LibraryEx; import com.intellij.openapi.util.InvalidDataException; import org.jetbrains.annotations.NotNull; import org.jdom.Element; import java.util.List; import java.util.Arrays; /** * @author nik */ class LibraryInfoBasedOnLibrary implements LibraryInfo { private final Library myLibrary; private LibraryInfoImpl myInfoToRestore; public LibraryInfoBasedOnLibrary(@NotNull Library library) { assert !(library instanceof LibraryEx) || !((LibraryEx)library).isDisposed(); myLibrary = library; myInfoToRestore = new LibraryInfoImpl(library); } public String getName() { return myLibrary.getName(); } @NotNull public List<String> getUrls() { return Arrays.asList(myLibrary.getUrls(OrderRootType.CLASSES)); } public String getLevel() { final LibraryTable table = myLibrary.getTable(); return table == null ? LibraryLink.MODULE_LEVEL : table.getTableLevel(); } @NotNull public Library getLibrary() { return myLibrary; } public void addUrl(String url) { } public void readExternal(Element element) throws InvalidDataException { } public LibraryInfoImpl getInfoToRestore() { return myInfoToRestore; } }
compiler/impl/com/intellij/openapi/deployment/LibraryInfoBasedOnLibrary.java
/* * Copyright (c) 2000-2007 JetBrains s.r.o. All Rights Reserved. */ package com.intellij.openapi.deployment; import com.intellij.openapi.roots.libraries.Library; import com.intellij.openapi.roots.libraries.LibraryTable; import com.intellij.openapi.roots.OrderRootType; import com.intellij.openapi.roots.impl.libraries.LibraryEx; import com.intellij.openapi.util.InvalidDataException; import org.jetbrains.annotations.NotNull; import org.jdom.Element; import java.util.List; import java.util.Arrays; /** * @author nik */ class LibraryInfoBasedOnLibrary implements LibraryInfo { private final Library myLibrary; private LibraryInfoImpl myInfoToRestore; public LibraryInfoBasedOnLibrary(@NotNull Library library) { assert !((LibraryEx)library).isDisposed(); myLibrary = library; myInfoToRestore = new LibraryInfoImpl(library); } public String getName() { return myLibrary.getName(); } @NotNull public List<String> getUrls() { return Arrays.asList(myLibrary.getUrls(OrderRootType.CLASSES)); } public String getLevel() { final LibraryTable table = myLibrary.getTable(); return table == null ? LibraryLink.MODULE_LEVEL : table.getTableLevel(); } @NotNull public Library getLibrary() { return myLibrary; } public void addUrl(String url) { } public void readExternal(Element element) throws InvalidDataException { } public LibraryInfoImpl getInfoToRestore() { return myInfoToRestore; } }
IDEADEV-33777
compiler/impl/com/intellij/openapi/deployment/LibraryInfoBasedOnLibrary.java
IDEADEV-33777
<ide><path>compiler/impl/com/intellij/openapi/deployment/LibraryInfoBasedOnLibrary.java <ide> private LibraryInfoImpl myInfoToRestore; <ide> <ide> public LibraryInfoBasedOnLibrary(@NotNull Library library) { <del> assert !((LibraryEx)library).isDisposed(); <add> assert !(library instanceof LibraryEx) || !((LibraryEx)library).isDisposed(); <ide> myLibrary = library; <ide> myInfoToRestore = new LibraryInfoImpl(library); <ide> }
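The fix recorded above (IDEADEV-33777) guards the disposed-library assertion with an instanceof check, so Library implementations that are not a LibraryEx no longer cause a ClassCastException inside the assert. Below is a minimal sketch of the same guarded-cast pattern, using stand-in interfaces rather than the real IntelliJ OpenAPI types.

// Stand-in types illustrating the guarded cast from the diff above;
// these are NOT the IntelliJ OpenAPI Library/LibraryEx interfaces.
interface Library { String getName(); }
interface LibraryEx extends Library { boolean isDisposed(); }

public class GuardedCastDemo {
    static void use(Library library) {
        // Only consult isDisposed() when the runtime type actually exposes it;
        // an unconditional cast would fail for other Library implementations.
        assert !(library instanceof LibraryEx) || !((LibraryEx) library).isDisposed()
                : "library is already disposed: " + library.getName();
        System.out.println("Using " + library.getName());
    }

    public static void main(String[] args) {
        use(new Library() { public String getName() { return "plain"; } });
        use(new LibraryEx() {
            public String getName() { return "ex"; }
            public boolean isDisposed() { return false; }
        });
    }
}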
Java
mit
5a470cddbb82c593f6b3220902eec57d5b920cc4
0
sk89q/CommandHelper,sk89q/CommandHelper,sk89q/CommandHelper,sk89q/CommandHelper
package com.laytonsmith.core; import com.laytonsmith.abstraction.Implementation; import com.laytonsmith.annotations.breakable; import com.laytonsmith.annotations.nolinking; import com.laytonsmith.annotations.unbreakable; import com.laytonsmith.commandhelper.CommandHelperPlugin; import com.laytonsmith.core.Optimizable.OptimizationOption; import com.laytonsmith.core.compiler.FileOptions; import com.laytonsmith.core.compiler.KeywordList; import com.laytonsmith.core.compiler.TokenStream; import com.laytonsmith.core.constructs.CDecimal; import com.laytonsmith.core.constructs.CDouble; import com.laytonsmith.core.constructs.CFunction; import com.laytonsmith.core.constructs.CIdentifier; import com.laytonsmith.core.constructs.CInt; import com.laytonsmith.core.constructs.CKeyword; import com.laytonsmith.core.constructs.CLabel; import com.laytonsmith.core.constructs.CNull; import com.laytonsmith.core.constructs.CPreIdentifier; import com.laytonsmith.core.constructs.CSlice; import com.laytonsmith.core.constructs.CString; import com.laytonsmith.core.constructs.CSymbol; import com.laytonsmith.core.constructs.CVoid; import com.laytonsmith.core.constructs.Construct; import com.laytonsmith.core.constructs.IVariable; import com.laytonsmith.core.constructs.Target; import com.laytonsmith.core.constructs.Token; import com.laytonsmith.core.constructs.Token.TType; import com.laytonsmith.core.constructs.Variable; import com.laytonsmith.core.environments.CommandHelperEnvironment; import com.laytonsmith.core.environments.Environment; import com.laytonsmith.core.environments.GlobalEnv; import com.laytonsmith.core.exceptions.ConfigCompileException; import com.laytonsmith.core.exceptions.ConfigCompileGroupException; import com.laytonsmith.core.exceptions.ConfigRuntimeException; import com.laytonsmith.core.exceptions.ProgramFlowManipulationException; import com.laytonsmith.core.functions.Compiler; import com.laytonsmith.core.functions.DataHandling; import com.laytonsmith.core.functions.Function; import com.laytonsmith.core.functions.FunctionBase; import com.laytonsmith.core.functions.FunctionList; import com.laytonsmith.core.functions.IncludeCache; import com.laytonsmith.core.taskmanager.TaskManager; import com.laytonsmith.persistence.DataSourceException; import java.io.File; import java.io.IOException; import java.net.URISyntaxException; import java.util.ArrayList; import java.util.Arrays; import java.util.EmptyStackException; import java.util.EnumSet; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.ListIterator; import java.util.Map; import java.util.NoSuchElementException; import java.util.Set; import java.util.Stack; import java.util.concurrent.atomic.AtomicInteger; import java.util.regex.Pattern; /** * The MethodScriptCompiler class handles the various stages of compilation and provides helper methods for execution of * the compiled trees. */ public final class MethodScriptCompiler { private static final EnumSet<Optimizable.OptimizationOption> NO_OPTIMIZATIONS = EnumSet.noneOf(Optimizable.OptimizationOption.class); private MethodScriptCompiler() { } private static final Pattern VAR_PATTERN = Pattern.compile("\\$[\\p{L}0-9_]+"); private static final Pattern IVAR_PATTERN = Pattern.compile(IVariable.VARIABLE_NAME_REGEX); /** * Lexes the script, and turns it into a token stream. This looks through the script character by character. 
* * @param script The script to lex * @param file The file this script came from, or potentially null if the code is from a dynamic source * @param inPureMScript If the script is in pure MethodScript, this should be true. Pure MethodScript is defined as * code that doesn't have command alias wrappers. * @return A stream of tokens * @throws ConfigCompileException If compilation fails due to bad syntax */ public static TokenStream lex(String script, File file, boolean inPureMScript) throws ConfigCompileException { return lex(script, file, inPureMScript, false); } /** * Lexes the script, and turns it into a token stream. This looks through the script character by character. * * @param script The script to lex * @param file The file this script came from, or potentially null if the code is from a dynamic source * @param inPureMScript If the script is in pure MethodScript, this should be true. Pure MethodScript is defined as * code that doesn't have command alias wrappers. * @param saveAllTokens If this script is planning to be compiled, then this value should always be false, however, * if the calling code needs all tokens for informational purposes (and doesn't plan on actually compiling the code) * then this can be true. If true, all tokens are saved, including comments and (some) whitespace. Given this lexing * stream, the exact source code could be re-constructed. * * A note on whitespace: The whitespace tokens are not guaranteed to be accurate, however, the column information * is. If you have two tokens t1 and t2, each with a value of length 1, where the columns are 1 and 5, then that * means there are 4 spaces between the two. * @return A stream of tokens * @throws ConfigCompileException If compilation fails due to bad syntax */ public static TokenStream lex(String script, File file, boolean inPureMScript, boolean saveAllTokens) throws ConfigCompileException { if(script.isEmpty()) { return new TokenStream(new LinkedList<>(), ""); } if((int) script.charAt(0) == 65279) { // Remove the UTF-8 Byte Order Mark, if present. script = script.substring(1); } final StringBuilder fileOptions = new StringBuilder(); script = script.replaceAll("\r\n", "\n"); script = script + "\n"; final Set<String> keywords = KeywordList.getKeywordNames(); final TokenStream tokenList = new TokenStream(); // Set our state variables. boolean stateInQuote = false; int quoteLineNumberStart = 1; boolean inSmartQuote = false; int smartQuoteLineNumberStart = 1; boolean inComment = false; int commentLineNumberStart = 1; boolean commentIsBlock = false; boolean inOptVar = false; boolean inCommand = (!inPureMScript); boolean inMultiline = false; boolean inSmartComment = false; boolean inFileOptions = false; int fileOptionsLineNumberStart = 1; StringBuilder buf = new StringBuilder(); int lineNum = 1; int column = 1; int lastColumn = 0; Target target = Target.UNKNOWN; // Lex the script character by character. for(int i = 0; i < script.length(); i++) { Character c = script.charAt(i); Character c2 = null; if(i < script.length() - 1) { c2 = script.charAt(i + 1); } column += i - lastColumn; lastColumn = i; if(c == '\n') { lineNum++; column = 1; if(!inMultiline && !inPureMScript) { inCommand = true; } } if(buf.length() == 0) { target = new Target(lineNum, file, column); } // If we are in file options, add the character to the buffer if it's not a file options end character. if(inFileOptions) { // For a '>' character outside of a comment, '\>' would have to be used in file options. // Other characters than '>'cannot be escaped. 
// If support for more escaped characters would be desired in the future, it could be added here. switch(c) { case '\\': { if(c2 == '>') { // "\>". fileOptions.append('>'); i++; continue; } break; } case '>': { if(saveAllTokens) { tokenList.add(new Token(TType.FILE_OPTIONS_STRING, fileOptions.toString(), target)); tokenList.add(new Token(TType.FILE_OPTIONS_END, ">", target)); } inFileOptions = false; continue; } } fileOptions.append(c); continue; } // Comment handling. This is bypassed if we are in a string. if(!stateInQuote && !inSmartQuote) { switch(c) { // Block comments start (/* and /**) and Double slash line comment start (//). case '/': { if(!inComment) { if(c2 == '*') { // "/*" or "/**". buf.append("/*"); inComment = true; commentIsBlock = true; if(i < script.length() - 2 && script.charAt(i + 2) == '*') { // "/**". inSmartComment = true; buf.append("*"); i++; } commentLineNumberStart = lineNum; i++; continue; } else if(c2 == '/') { // "//". buf.append("//"); inComment = true; i++; continue; } } break; } // Line comment start (#). case '#': { if(!inComment) { // "#". buf.append("#"); inComment = true; continue; } break; } // Block comment end (*/). case '*': { if(inComment && commentIsBlock && c2 == '/') { // "*/". if(saveAllTokens || inSmartComment) { buf.append("*/"); tokenList.add(new Token(inSmartComment ? TType.SMART_COMMENT : TType.COMMENT, buf.toString(), target)); } buf = new StringBuilder(); target = new Target(lineNum, file, column); inComment = false; commentIsBlock = false; inSmartComment = false; i++; continue; } break; } // Line comment end (\n). case '\n': { if(inComment && !commentIsBlock) { // "\n". inComment = false; if(saveAllTokens) { tokenList.add(new Token(TType.COMMENT, buf.toString(), target)); tokenList.add(new Token(TType.NEWLINE, "\n", new Target(lineNum + 1, file, 0))); } buf = new StringBuilder(); target = new Target(lineNum, file, column); continue; } break; } } } // If we are in a comment, add the character to the buffer. if(inComment) { buf.append(c); continue; } // Handle non-comment non-quoted characters. if(!stateInQuote) { // We're not in a comment or quoted string, handle: +=, -=, *=, /=, .=, ->, ++, --, %, **, *, +, -, /, // >=, <=, <<<, >>>, <, >, ===, !==, ==, !=, &&&, |||, &&, ||, !, {, }, .., ., ::, [, =, ], :, comma, // (, ), ;, and whitespace. matched: { Token token; switch(c) { case '+': { if(c2 == '=') { // "+=". token = new Token(TType.PLUS_ASSIGNMENT, "+=", target); i++; } else if(c2 == '+') { // "++". token = new Token(TType.INCREMENT, "++", target); i++; } else { // "+". token = new Token(TType.PLUS, "+", target); } break; } case '-': { if(c2 == '=') { // "-=". token = new Token(TType.MINUS_ASSIGNMENT, "-=", target); i++; } else if(c2 == '-') { // "--". token = new Token(TType.DECREMENT, "--", target); i++; } else if(c2 == '>') { // "->". token = new Token(TType.DEREFERENCE, "->", target); i++; } else { // "-". token = new Token(TType.MINUS, "-", target); } break; } case '*': { if(c2 == '=') { // "*=". token = new Token(TType.MULTIPLICATION_ASSIGNMENT, "*=", target); i++; } else if(c2 == '*') { // "**". token = new Token(TType.EXPONENTIAL, "**", target); i++; } else { // "*". token = new Token(TType.MULTIPLICATION, "*", target); } break; } case '/': { if(c2 == '=') { // "/=". token = new Token(TType.DIVISION_ASSIGNMENT, "/=", target); i++; } else { // "/". // Protect against matching commands. if(Character.isLetter(c2)) { break matched; // Pretend that division didn't match. 
} token = new Token(TType.DIVISION, "/", target); } break; } case '.': { if(c2 == '=') { // ".=". token = new Token(TType.CONCAT_ASSIGNMENT, ".=", target); i++; } else if(c2 == '.') { // "..". token = new Token(TType.SLICE, "..", target); i++; } else { // ".". token = new Token(TType.DOT, ".", target); } break; } case '%': { token = new Token(TType.MODULO, "%", target); break; } case '>': { if(c2 == '=') { // ">=". token = new Token(TType.GTE, ">=", target); i++; } else if(c2 == '>' && i < script.length() - 2 && script.charAt(i + 2) == '>') { // ">>>". token = new Token(TType.MULTILINE_START, ">>>", target); inMultiline = true; i += 2; } else { // ">". token = new Token(TType.GT, ">", target); } break; } case '<': { if(c2 == '!') { // "<!". if(buf.length() > 0) { tokenList.add(new Token(TType.UNKNOWN, buf.toString(), target)); buf = new StringBuilder(); target = new Target(lineNum, file, column); } if(saveAllTokens) { tokenList.add(new Token(TType.FILE_OPTIONS_START, "<!", target)); } inFileOptions = true; fileOptionsLineNumberStart = lineNum; i++; continue; } else if(c2 == '=') { // "<=". token = new Token(TType.LTE, "<=", target); i++; } else if(c2 == '<' && i < script.length() - 2 && script.charAt(i + 2) == '<') { // "<<<". token = new Token(TType.MULTILINE_END, "<<<", target); inMultiline = false; i += 2; } else { // "<". token = new Token(TType.LT, "<", target); } break; } case '=': { if(c2 == '=') { if(i < script.length() - 2 && script.charAt(i + 2) == '=') { // "===". token = new Token(TType.STRICT_EQUALS, "===", target); i += 2; } else { // "==". token = new Token(TType.EQUALS, "==", target); i++; } } else { // "=". if(inCommand) { if(inOptVar) { token = new Token(TType.OPT_VAR_ASSIGN, "=", target); } else { token = new Token(TType.ALIAS_END, "=", target); inCommand = false; } } else { token = new Token(TType.ASSIGNMENT, "=", target); } } break; } case '!': { if(c2 == '=') { if(i < script.length() - 2 && script.charAt(i + 2) == '=') { // "!==". token = new Token(TType.STRICT_NOT_EQUALS, "!==", target); i += 2; } else { // "!=". token = new Token(TType.NOT_EQUALS, "!=", target); i++; } } else { // "!". token = new Token(TType.LOGICAL_NOT, "!", target); } break; } case '&': { if(c2 == '&') { if(i < script.length() - 2 && script.charAt(i + 2) == '&') { // "&&&". token = new Token(TType.DEFAULT_AND, "&&&", target); i += 2; } else { // "&&". token = new Token(TType.LOGICAL_AND, "&&", target); i++; } } else { // "&". // Bitwise symbols are not used yet. break matched; // Pretend that bitwise AND didn't match. // token = new Token(TType.BIT_AND, "&", target); } break; } case '|': { if(c2 == '|') { if(i < script.length() - 2 && script.charAt(i + 2) == '|') { // "|||". token = new Token(TType.DEFAULT_OR, "|||", target); i += 2; } else { // "||". token = new Token(TType.LOGICAL_OR, "||", target); i++; } } else { // "|". // Bitwise symbols are not used yet. break matched; // Pretend that bitwise OR didn't match. // token = new Token(TType.BIT_OR, "|", target); } break; } // Bitwise symbols are not used yet. // case '^': { // token = new Token(TType.BIT_XOR, "^", target); // break; // } case ':': { if(c2 == ':') { // "::". token = new Token(TType.DEREFERENCE, "::", target); i++; } else { // ":". 
token = new Token(TType.LABEL, ":", target); } break; } case '{': { token = new Token(TType.LCURLY_BRACKET, "{", target); break; } case '}': { token = new Token(TType.RCURLY_BRACKET, "}", target); break; } case '[': { token = new Token(TType.LSQUARE_BRACKET, "[", target); inOptVar = true; break; } case ']': { token = new Token(TType.RSQUARE_BRACKET, "]", target); inOptVar = false; break; } case ',': { token = new Token(TType.COMMA, ",", target); break; } case ';': { token = new Token(TType.SEMICOLON, ";", target); break; } case '(': { token = new Token(TType.FUNC_START, "(", target); // Handle the buffer or previous token, with the knowledge that a FUNC_START follows. if(buf.length() > 0) { if(saveAllTokens) { // In this case, we need to check for keywords first, because we want to go ahead // and convert into that stage. In the future, we might want to do this // unconditionally, but for now, just go ahead and only do it if saveAllTokens is // true, because we know that won't be used by the compiler. if(KeywordList.getKeywordByName(buf.toString()) != null) { // It's a keyword. tokenList.add(new Token(TType.KEYWORD, buf.toString(), target)); } else { // It's not a keyword, but a normal function. tokenList.add(new Token(TType.FUNC_NAME, buf.toString(), target)); } } else { tokenList.add(new Token(TType.FUNC_NAME, buf.toString(), target)); } buf = new StringBuilder(); target = new Target(lineNum, file, column); } else { // The previous token, if unknown, should be changed to a FUNC_NAME. If it's not // unknown, we may be doing standalone parenthesis, so auto tack on the __autoconcat__ // function. try { int count = 0; Iterator<Token> it = tokenList.descendingIterator(); Token t; while((t = it.next()).type == TType.WHITESPACE) { count++; } if(t.type == TType.UNKNOWN) { t.type = TType.FUNC_NAME; // Go ahead and remove the whitespace here too, they break things. count--; for(int a = 0; a < count; a++) { tokenList.removeLast(); } } else { tokenList.add(new Token(TType.FUNC_NAME, "__autoconcat__", target)); } } catch (NoSuchElementException e) { // This is the first element on the list, so, it's another autoconcat. tokenList.add(new Token(TType.FUNC_NAME, "__autoconcat__", target)); } } break; } case ')': { token = new Token(TType.FUNC_END, ")", target); break; } case ' ': { // Whitespace case #1. token = new Token(TType.WHITESPACE, " ", target); break; } case '\t': { // Whitespace case #2 (TAB). token = new Token(TType.WHITESPACE, "\t", target); break; } default: { // No match was found at this point, so continue matching below. break matched; } } // Add previous characters as UNKNOWN token. if(buf.length() > 0) { tokenList.add(new Token(TType.UNKNOWN, buf.toString(), target)); buf = new StringBuilder(); target = new Target(lineNum, file, column); } // Add the new token to the token list. tokenList.add(token); // Continue lexing. continue; } } // Handle non-comment characters that might start or stop a quoted string. switch(c) { case '\'': { if(stateInQuote && !inSmartQuote) { tokenList.add(new Token(TType.STRING, buf.toString(), target)); buf = new StringBuilder(); target = new Target(lineNum, file, column); stateInQuote = false; continue; } else if(!stateInQuote) { stateInQuote = true; quoteLineNumberStart = lineNum; inSmartQuote = false; if(buf.length() > 0) { tokenList.add(new Token(TType.UNKNOWN, buf.toString(), target)); buf = new StringBuilder(); target = new Target(lineNum, file, column); } continue; } else { // We're in a smart quote. 
buf.append("'"); } break; } case '"': { if(stateInQuote && inSmartQuote) { tokenList.add(new Token(TType.SMART_STRING, buf.toString(), target)); buf = new StringBuilder(); target = new Target(lineNum, file, column); stateInQuote = false; inSmartQuote = false; continue; } else if(!stateInQuote) { stateInQuote = true; inSmartQuote = true; smartQuoteLineNumberStart = lineNum; if(buf.length() > 0) { tokenList.add(new Token(TType.UNKNOWN, buf.toString(), target)); buf = new StringBuilder(); target = new Target(lineNum, file, column); } continue; } else { // We're in normal quotes. buf.append('"'); } break; } case '\n': { // Append a newline to the buffer if it's quoted. if(stateInQuote) { buf.append(c); } else { // Newline is not quoted. Move the buffer to an UNKNOWN token and add a NEWLINE token. if(buf.length() > 0) { tokenList.add(new Token(TType.UNKNOWN, buf.toString(), target)); buf = new StringBuilder(); target = new Target(lineNum, file, column); } tokenList.add(new Token(TType.NEWLINE, "\n", target)); } continue; } case '\\': { // Handle escaped characters in quotes or a single "\" seperator token otherwise. // Handle backslash character outside of quotes. if(!stateInQuote) { tokenList.add(new Token(TType.SEPERATOR, "\\", target)); break; } // Handle an escape sign in a quote. switch(c2) { case '\\': case '\'': case '"': buf.append(c2); break; case 'n': buf.append('\n'); break; case 'r': buf.append('\r'); break; case 't': buf.append('\t'); break; case '0': buf.append('\0'); break; case 'f': buf.append('\f'); break; // Form feed. case 'v': buf.append('\u000B'); break; // Vertical TAB. case 'a': buf.append('\u0007'); break; // Alarm. case 'b': buf.append('\u0008'); break; // Backspace. case 'u': { // Unicode (4 characters). // Grab the next 4 characters, and check to see if they are numbers. if(i + 5 >= script.length()) { throw new ConfigCompileException("Unrecognized unicode escape sequence", target); } String unicode = script.substring(i + 2, i + 6); int unicodeNum; try { unicodeNum = Integer.parseInt(unicode, 16); } catch (NumberFormatException e) { throw new ConfigCompileException( "Unrecognized unicode escape sequence: \\u" + unicode, target); } buf.append(Character.toChars(unicodeNum)); i += 4; break; } case 'U': { // Unicode (8 characters). // Grab the next 8 characters and check to see if they are numbers. if(i + 9 >= script.length()) { throw new ConfigCompileException("Unrecognized unicode escape sequence", target); } String unicode = script.substring(i + 2, i + 10); int unicodeNum; try { unicodeNum = Integer.parseInt(unicode, 16); } catch (NumberFormatException e) { throw new ConfigCompileException( "Unrecognized unicode escape sequence: \\u" + unicode, target); } buf.append(Character.toChars(unicodeNum)); i += 8; break; } case '@': { if(!inSmartQuote) { throw new ConfigCompileException("The escape sequence \\@ is not" + " a recognized escape sequence in a non-smart string", target); } buf.append("\\@"); break; } default: { // Since we might expand this list later, don't let them use unescaped backslashes. throw new ConfigCompileException( "The escape sequence \\" + c2 + " is not a recognized escape sequence", target); } } i++; continue; } default: { // At this point, only non-comment and non-escaped characters that are not part of a // quote start/end are left. // Disallow Non-Breaking Space Characters. 
if(!stateInQuote && c == '\u00A0'/*nbsp*/) { throw new ConfigCompileException("NBSP character in script", target); } // Add the characters that didn't match anything to the buffer. buf.append(c); continue; } } } // End of lexing. // Handle unended file options. if(inFileOptions) { throw new ConfigCompileException("Unended file options. You started the the file options on line " + fileOptionsLineNumberStart, target); } // Handle unended string literals. if(stateInQuote) { if(inSmartQuote) { throw new ConfigCompileException("Unended string literal. You started the last double quote on line " + smartQuoteLineNumberStart, target); } else { throw new ConfigCompileException("Unended string literal. You started the last single quote on line " + quoteLineNumberStart, target); } } // Handle unended comment blocks. Since a newline is added to the end of the script, line comments are ended. if(inComment || commentIsBlock) { throw new ConfigCompileException("Unended block comment. You started the comment on line " + commentLineNumberStart, target); } // Look at the tokens and get meaning from them. Also, look for improper symbol locations // and go ahead and absorb unary +- into the token. ListIterator<Token> it = tokenList.listIterator(0); while(it.hasNext()) { Token t = it.next(); // Combine whitespace tokens into one. if(t.type == TType.WHITESPACE && it.hasNext()) { Token next; if((next = it.next()).type == TType.WHITESPACE) { t.value += next.val(); it.remove(); // Remove 'next'. } else { it.previous(); // Select 'next' <--. } it.previous(); // Select 't' <--. it.next(); // Select 't' -->. } // Convert "-" + number to -number if allowed. it.previous(); // Select 't' <--. if(it.hasPrevious() && t.type == TType.UNKNOWN) { Token prev1 = it.previous(); // Select 'prev1' <--. if(prev1.type.isPlusMinus()) { // Find the first non-whitespace token before the '-'. Token prevNonWhitespace = null; while(it.hasPrevious()) { if(it.previous().type != TType.WHITESPACE) { prevNonWhitespace = it.next(); break; } } while(it.next() != prev1) { // Skip until selection is at 'prev1 -->'. } if(prevNonWhitespace != null) { // Convert "±UNKNOWN" if the '±' is used as a sign (and not an add/subtract operation). if(!prevNonWhitespace.type.isIdentifier() // Don't convert "number/string/var ± ...". && prevNonWhitespace.type != TType.FUNC_END // Don't convert "func() ± ...". && prevNonWhitespace.type != TType.RSQUARE_BRACKET // Don't convert "] ± ..." (arrays). && !IVAR_PATTERN.matcher(t.val()).matches() // Don't convert "± @var". && !VAR_PATTERN.matcher(t.val()).matches()) { // Don't convert "± $var". // It is a negative/positive number: Absorb the sign. t.value = prev1.value + t.value; it.remove(); // Remove 'prev1'. } } } else { it.next(); // Select 'prev1' -->. } } it.next(); // Select 't' -->. // Assign a type to all UNKNOWN tokens. if(t.type == TType.UNKNOWN) { if(t.val().charAt(0) == '/' && t.val().length() > 1) { t.type = TType.COMMAND; } else if(t.val().equals("$")) { t.type = TType.FINAL_VAR; } else if(VAR_PATTERN.matcher(t.val()).matches()) { t.type = TType.VARIABLE; } else if(IVAR_PATTERN.matcher(t.val()).matches()) { t.type = TType.IVARIABLE; } else if(t.val().charAt(0) == '@') { throw new ConfigCompileException("IVariables must match the regex: " + IVAR_PATTERN, target); } else if(keywords.contains(t.val())) { t.type = TType.KEYWORD; } else if(t.val().matches("[\t ]*")) { t.type = TType.WHITESPACE; } else { t.type = TType.LIT; } } // Skip this check if we're not in pure mscript. 
if(inPureMScript) { if(it.hasNext()) { Token next = it.next(); // Select 'next' -->. it.previous(); // Select 'next' <--. it.previous(); // Select 't' <--. if(t.type.isSymbol() && !t.type.isUnary() && !next.type.isUnary()) { if(it.hasPrevious()) { Token prev1 = it.previous(); // Select 'prev1' <--. if(prev1.type.equals(TType.FUNC_START) || prev1.type.equals(TType.COMMA) || next.type.equals(TType.FUNC_END) || next.type.equals(TType.COMMA) || prev1.type.isSymbol() || next.type.isSymbol()) { throw new ConfigCompileException("Unexpected symbol (" + t.val() + ")", t.getTarget()); } it.next(); // Select 'prev1' -->. } } it.next(); // Select 't' -->. } } } // Set file options tokenList.setFileOptions(fileOptions.toString()); // Make sure that the file options are the first non-comment code in the file { boolean foundCode = false; for(Token t : tokenList) { if(t.type.isFileOption()) { if(foundCode) { throw new ConfigCompileException("File options must be the first non-comment section in the" + " code", t.target); } break; } if(!t.type.isComment() && !t.type.isWhitespace()) { foundCode = true; } } } return tokenList; } /** * This function breaks the token stream into parts, separating the aliases/MethodScript from the command triggers * * @param tokenStream * @return * @throws ConfigCompileException */ public static List<Script> preprocess(TokenStream tokenStream) throws ConfigCompileException { if(tokenStream == null || tokenStream.isEmpty()) { return new ArrayList<>(); } // Remove leading newlines. while(!tokenStream.isEmpty() && tokenStream.getFirst().type == TType.NEWLINE) { tokenStream.removeFirst(); // Remove leading newlines. } // Return an empty list if there were only newlines. if(tokenStream.isEmpty()) { return new ArrayList<>(); } // Remove whitespaces and duplicate newlines. { ListIterator<Token> it = tokenStream.listIterator(0); Token token = it.next(); outerLoop: while(true) { switch(token.type) { case WHITESPACE: { it.remove(); // Remove whitespaces. if(!it.hasNext()) { break outerLoop; } token = it.next(); continue outerLoop; } case NEWLINE: { while(it.hasNext()) { if((token = it.next()).type == TType.NEWLINE) { it.remove(); // Remove duplicate newlines. } else { continue outerLoop; } } break outerLoop; } default: { if(!it.hasNext()) { break outerLoop; } token = it.next(); continue outerLoop; } } } } // Handle multiline constructs. // Take out newlines between the '= >>>' and '<<<' tokens (also removing the '>>>' and '<<<' tokens). // Also remove comments and also remove newlines that are behind a '\'. boolean insideMultiline = false; ListIterator<Token> it = tokenStream.listIterator(0); Token token = null; while(it.hasNext()) { token = it.next(); switch(token.type) { case ALIAS_END: { // "=". if(it.hasNext()) { if(it.next().type == TType.MULTILINE_START) { // "= >>>". insideMultiline = true; it.remove(); // Remove multiline start (>>>). it.previous(); // Select 'token' <---. it.next(); // Select 'token' -->. } else { it.previous(); // Select 'next' <---. } } continue; } case MULTILINE_END: { // "<<<". // Handle multiline end token (<<<) without start. if(!insideMultiline) { throw new ConfigCompileException( "Found multiline end symbol, and no multiline start found", token.target); } insideMultiline = false; it.remove(); // Remove multiline end (<<<). continue; } case MULTILINE_START: { // ">>>". // Handle multiline start token (>>>) while already in multiline. 
if(insideMultiline) { throw new ConfigCompileException("Did not expect a multiline start symbol here," + " are you missing a multiline end symbol above this line?", token.target); } // Handle multiline start token (>>>) without alias end (=) in front. it.previous(); // Select 'token' <--. if(!it.hasPrevious() || it.previous().type != TType.ALIAS_END) { throw new ConfigCompileException( "Multiline symbol must follow the alias_end (=) symbol", token.target); } it.next(); // Select 'prev' -->. it.next(); // Select 'token' -->. continue; } case NEWLINE: { // "\n". // Skip newlines that are inside a multiline construct. if(insideMultiline) { it.remove(); // Remove newline. } continue; } // Remove comments. case COMMENT: case SMART_COMMENT: { it.remove(); // Remove comment. continue; } default: { // Remove newlines that are behind a '\'. if(token.type != TType.STRING && token.val().equals("\\") && it.hasNext()) { if(it.next().type == TType.NEWLINE) { it.remove(); // Remove newline. it.previous(); // Select 'token' <--. it.next(); // Select 'token' -->. } else { it.previous(); // Select 'next' <--. } } } } } assert token != null; // Handle missing multiline end token. if(insideMultiline) { throw new ConfigCompileException("Expecting a multiline end symbol, but your last multiline alias appears to be missing one.", token.target); } // Now that we have all lines minified, we should be able to split on newlines // and easily find the left and right sides. List<Token> left = new ArrayList<>(); List<Token> right = new ArrayList<>(); List<Script> scripts = new ArrayList<>(); tokenLoop: for(it = tokenStream.listIterator(0); it.hasNext();) { Token t = it.next(); // Add all tokens until ALIAS_END (=) or end of stream. while(t.type != TType.ALIAS_END) { if(!it.hasNext()) { break tokenLoop; // End of stream. } left.add(t); t = it.next(); } // Add all tokens until NEWLINE (\n). while(t.type != TType.NEWLINE) { assert it.hasNext(); // All files end with a newline, so end of stream should be impossible here. right.add(t); t = it.next(); } // Create a new script for the obtained left and right if end of stream has not been reached. if(t.type == TType.NEWLINE) { // Check for spurious symbols, which indicate an issue with the script, but ignore any whitespace. for(int j = left.size() - 1; j >= 0; j--) { if(left.get(j).type == TType.NEWLINE) { if(j > 0 && left.get(j - 1).type != TType.WHITESPACE) { throw new ConfigCompileException( "Unexpected token: " + left.get(j - 1).val(), left.get(j - 1).getTarget()); } } } // Create a new script from the command descriptor (left) and code (right) and add it to the list. Script s = new Script(left, right, null, tokenStream.getFileOptions()); scripts.add(s); // Create new left and right array for the next script. left = new ArrayList<>(); right = new ArrayList<>(); } } // Return the scripts. return scripts; } /** * Compiles the token stream into a valid ParseTree. This also includes optimization and reduction. * * @param stream The token stream, as generated by {@link #lex(String, File, boolean) lex} * @return A fully compiled, optimized, and reduced parse tree. If {@code stream} is null or empty, null is * returned. * @throws ConfigCompileException If the script contains syntax errors. Additionally, during optimization, certain * methods may cause compile errors. Any function that can optimize static occurrences and throws a * {@link ConfigRuntimeException} will have that exception converted to a ConfigCompileException. 
*/ public static ParseTree compile(TokenStream stream) throws ConfigCompileException, ConfigCompileGroupException { Set<ConfigCompileException> compilerErrors = new HashSet<>(); if(stream == null || stream.isEmpty()) { return null; } Target unknown; try { //Instead of using Target.UNKNOWN, we can at least set the file. unknown = new Target(0, stream.get(0).target.file(), 0); } catch (Exception e) { unknown = Target.UNKNOWN; } // Remove all newlines and whitespaces. ListIterator<Token> it = stream.listIterator(0); while(it.hasNext()) { if(it.next().type.isWhitespace()) { it.remove(); } } // Get the file options. final FileOptions fileOptions = stream.getFileOptions(); ParseTree tree = new ParseTree(fileOptions); tree.setData(CNull.NULL); Stack<ParseTree> parents = new Stack<>(); /** * constructCount is used to determine if we need to use autoconcat when reaching a FUNC_END. The previous * constructs, if the count is greater than 1, will be moved down into an autoconcat. */ Stack<AtomicInteger> constructCount = new Stack<>(); constructCount.push(new AtomicInteger(0)); parents.push(tree); tree.addChild(new ParseTree(new CFunction("__autoconcat__", unknown), fileOptions)); parents.push(tree.getChildAt(0)); tree = tree.getChildAt(0); constructCount.push(new AtomicInteger(0)); /** * The array stack is used to keep track of the number of square braces in use. */ Stack<AtomicInteger> arrayStack = new Stack<>(); arrayStack.add(new AtomicInteger(-1)); Stack<AtomicInteger> minusArrayStack = new Stack<>(); Stack<AtomicInteger> minusFuncStack = new Stack<>(); int parens = 0; Token t = null; int braceCount = 0; // Create a Token array to iterate over, rather than using the LinkedList's O(n) get() method. Token[] tokenArray = stream.toArray(new Token[stream.size()]); for(int i = 0; i < tokenArray.length; i++) { t = tokenArray[i]; Token prev1 = i - 1 >= 0 ? tokenArray[i - 1] : new Token(TType.UNKNOWN, "", t.target); Token next1 = i + 1 < stream.size() ? tokenArray[i + 1] : new Token(TType.UNKNOWN, "", t.target); Token next2 = i + 2 < stream.size() ? tokenArray[i + 2] : new Token(TType.UNKNOWN, "", t.target); Token next3 = i + 3 < stream.size() ? 
tokenArray[i + 3] : new Token(TType.UNKNOWN, "", t.target); // Brace handling if(t.type == TType.LCURLY_BRACKET) { ParseTree b = new ParseTree(new CFunction("__cbrace__", t.getTarget()), fileOptions); tree.addChild(b); tree = b; parents.push(b); braceCount++; constructCount.push(new AtomicInteger(0)); continue; } if(t.type == TType.RCURLY_BRACKET) { braceCount--; if(constructCount.peek().get() > 1) { //We need to autoconcat some stuff int stacks = constructCount.peek().get(); int replaceAt = tree.getChildren().size() - stacks; ParseTree c = new ParseTree(new CFunction("__autoconcat__", tree.getTarget()), fileOptions); List<ParseTree> subChildren = new ArrayList<>(); for(int b = replaceAt; b < tree.numberOfChildren(); b++) { subChildren.add(tree.getChildAt(b)); } c.setChildren(subChildren); if(replaceAt > 0) { List<ParseTree> firstChildren = new ArrayList<>(); for(int d = 0; d < replaceAt; d++) { firstChildren.add(tree.getChildAt(d)); } tree.setChildren(firstChildren); } else { tree.removeChildren(); } tree.addChild(c); } parents.pop(); tree = parents.peek(); constructCount.pop(); try { constructCount.peek().incrementAndGet(); } catch (EmptyStackException e) { throw new ConfigCompileException("Unexpected end curly brace", t.target); } continue; } //Associative array/label handling if(t.type == TType.LABEL && tree.getChildren().size() > 0) { //If it's not an atomic identifier it's an error. if(!prev1.type.isAtomicLit() && prev1.type != TType.IVARIABLE && prev1.type != TType.KEYWORD) { ConfigCompileException error = new ConfigCompileException("Invalid label specified", t.getTarget()); if(prev1.type == TType.FUNC_END) { // This is a fairly common mistake, so we have special handling for this, // because otherwise we would get a "Mismatched parenthesis" warning (which doesn't make sense), // and potentially lots of other invalid errors down the line, so we go ahead // and stop compilation at this point. throw error; } compilerErrors.add(error); } // Wrap previous construct in a CLabel ParseTree cc = tree.getChildren().get(tree.getChildren().size() - 1); tree.removeChildAt(tree.getChildren().size() - 1); tree.addChild(new ParseTree(new CLabel(cc.getData()), fileOptions)); continue; } //Array notation handling if(t.type.equals(TType.LSQUARE_BRACKET)) { arrayStack.push(new AtomicInteger(tree.getChildren().size() - 1)); continue; } else if(t.type.equals(TType.RSQUARE_BRACKET)) { boolean emptyArray = false; if(prev1.type.equals(TType.LSQUARE_BRACKET)) { emptyArray = true; } if(arrayStack.size() == 1) { throw new ConfigCompileException("Mismatched square bracket", t.target); } //array is the location of the array int array = arrayStack.pop().get(); //index is the location of the first node with the index int index = array + 1; if(!tree.hasChildren() || array == -1) { throw new ConfigCompileException("Brackets are illegal here", t.target); } ParseTree myArray = tree.getChildAt(array); ParseTree myIndex; if(!emptyArray) { myIndex = new ParseTree(new CFunction("__autoconcat__", myArray.getTarget()), fileOptions); for(int j = index; j < tree.numberOfChildren(); j++) { myIndex.addChild(tree.getChildAt(j)); } } else { myIndex = new ParseTree(new CSlice("0..-1", t.target), fileOptions); } tree.setChildren(tree.getChildren().subList(0, array)); ParseTree arrayGet = new ParseTree(new CFunction("array_get", t.target), fileOptions); arrayGet.addChild(myArray); arrayGet.addChild(myIndex); // Check if the @var[...] had a negating "-" in front. If so, add a neg(). 
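				// Illustrative example (assumed source): "-@arr[0]" ends up here and is compiled as
				// neg(array_get(@arr, 0)).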
if(!minusArrayStack.isEmpty() && arrayStack.size() + 1 == minusArrayStack.peek().get()) { if(!next1.type.equals(TType.LSQUARE_BRACKET)) { // Wait if there are more array_get's comming. ParseTree negTree = new ParseTree(new CFunction("neg", unknown), fileOptions); negTree.addChild(arrayGet); tree.addChild(negTree); minusArrayStack.pop(); } else { // Negate the next array_get instead, so just add this one to the tree. tree.addChild(arrayGet); } } else { tree.addChild(arrayGet); } constructCount.peek().set(constructCount.peek().get() - myIndex.numberOfChildren()); continue; } //Smart strings if(t.type == TType.SMART_STRING) { if(t.val().contains("@")) { ParseTree function = new ParseTree(fileOptions); function.setData(new CFunction(new Compiler.smart_string().getName(), t.target)); ParseTree string = new ParseTree(fileOptions); string.setData(new CString(t.value, t.target)); function.addChild(string); tree.addChild(function); } else { tree.addChild(new ParseTree(new CString(t.val(), t.target), fileOptions)); } constructCount.peek().incrementAndGet(); continue; } if(t.type == TType.DEREFERENCE) { //Currently unimplemented, but going ahead and making it strict compilerErrors.add(new ConfigCompileException("The '" + t.val() + "' symbol is not currently allowed in raw strings. You must quote all" + " symbols.", t.target)); } if(t.type.equals(TType.FUNC_NAME)) { CFunction func = new CFunction(t.val(), t.target); ParseTree f = new ParseTree(func, fileOptions); tree.addChild(f); constructCount.push(new AtomicInteger(0)); tree = f; parents.push(f); } else if(t.type.equals(TType.FUNC_START)) { if(!prev1.type.equals(TType.FUNC_NAME)) { throw new ConfigCompileException("Unexpected parenthesis", t.target); } parens++; } else if(t.type.equals(TType.FUNC_END)) { if(parens <= 0) { throw new ConfigCompileException("Unexpected parenthesis", t.target); } parens--; parents.pop(); // Pop function. if(constructCount.peek().get() > 1) { //We need to autoconcat some stuff int stacks = constructCount.peek().get(); int replaceAt = tree.getChildren().size() - stacks; ParseTree c = new ParseTree(new CFunction("__autoconcat__", tree.getTarget()), fileOptions); List<ParseTree> subChildren = new ArrayList<>(); for(int b = replaceAt; b < tree.numberOfChildren(); b++) { subChildren.add(tree.getChildAt(b)); } c.setChildren(subChildren); if(replaceAt > 0) { List<ParseTree> firstChildren = new ArrayList<>(); for(int d = 0; d < replaceAt; d++) { firstChildren.add(tree.getChildAt(d)); } tree.setChildren(firstChildren); } else { tree.removeChildren(); } tree.addChild(c); } constructCount.pop(); try { constructCount.peek().incrementAndGet(); } catch (EmptyStackException e) { throw new ConfigCompileException("Unexpected end parenthesis", t.target); } try { tree = parents.peek(); } catch (EmptyStackException e) { throw new ConfigCompileException("Unexpected end parenthesis", t.target); } // Handle "-func(args)" and "-func(args)[index]". if(!minusFuncStack.isEmpty() && minusFuncStack.peek().get() == parens + 1) { if(next1.type.equals(TType.LSQUARE_BRACKET)) { // Move the negation to the array_get which contains this function. minusArrayStack.push(new AtomicInteger(arrayStack.size() + 1)); // +1 because the bracket isn't counted yet. } else { // Negate this function. 
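					// Illustrative example (assumed source): "-func()" is rewritten here as neg(func()).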
ParseTree negTree = new ParseTree(new CFunction("neg", unknown), fileOptions); negTree.addChild(tree.getChildAt(tree.numberOfChildren() - 1)); tree.removeChildAt(tree.numberOfChildren() - 1); tree.addChildAt(tree.numberOfChildren(), negTree); } minusFuncStack.pop(); } } else if(t.type.equals(TType.COMMA)) { if(constructCount.peek().get() > 1) { int stacks = constructCount.peek().get(); int replaceAt = tree.getChildren().size() - stacks; ParseTree c = new ParseTree(new CFunction("__autoconcat__", unknown), fileOptions); List<ParseTree> subChildren = new ArrayList<>(); for(int b = replaceAt; b < tree.numberOfChildren(); b++) { subChildren.add(tree.getChildAt(b)); } c.setChildren(subChildren); if(replaceAt > 0) { List<ParseTree> firstChildren = new ArrayList<>(); for(int d = 0; d < replaceAt; d++) { firstChildren.add(tree.getChildAt(d)); } tree.setChildren(firstChildren); } else { tree.removeChildren(); } tree.addChild(c); } constructCount.peek().set(0); continue; } if(t.type == TType.SLICE) { //We got here because the previous token isn't being ignored, because it's //actually a control character, instead of whitespace, but this is a //"empty first" slice notation. Compare this to the code below. try { CSlice slice; String value = next1.val(); if(next1.type == TType.MINUS || next1.type == TType.PLUS) { value = next1.val() + next2.val(); i++; } slice = new CSlice(".." + value, t.getTarget()); i++; tree.addChild(new ParseTree(slice, fileOptions)); constructCount.peek().incrementAndGet(); continue; } catch (ConfigRuntimeException ex) { //CSlice can throw CREs, but at this stage, we have to //turn them into a CCE. throw new ConfigCompileException(ex); } } if(next1.type.equals(TType.SLICE)) { //Slice notation handling try { CSlice slice; if(t.type.isSeparator() || (t.type.isWhitespace() && prev1.type.isSeparator()) || t.type.isKeyword()) { //empty first String value = next2.val(); i++; if(next2.type == TType.MINUS || next2.type == TType.PLUS) { value = next2.val() + next3.val(); i++; } slice = new CSlice(".." + value, next1.getTarget()); if(t.type.isKeyword()) { tree.addChild(new ParseTree(new CKeyword(t.val(), t.getTarget()), fileOptions)); constructCount.peek().incrementAndGet(); } } else if(next2.type.isSeparator() || next2.type.isKeyword()) { //empty last String modifier = ""; if(prev1.type == TType.MINUS || prev1.type == TType.PLUS) { //The negative would have already been inserted into the tree modifier = prev1.val(); tree.removeChildAt(tree.getChildren().size() - 1); } slice = new CSlice(modifier + t.value + "..", t.target); } else { //both are provided String modifier1 = ""; if(prev1.type == TType.MINUS || prev1.type == TType.PLUS) { //It's a negative, incorporate that here, and remove the //minus from the tree modifier1 = prev1.val(); tree.removeChildAt(tree.getChildren().size() - 1); } Token first = t; if(first.type.isWhitespace()) { first = prev1; } Token second = next2; i++; String modifier2 = ""; if(next2.type == TType.MINUS || next2.type == TType.PLUS) { modifier2 = next2.val(); second = next3; i++; } slice = new CSlice(modifier1 + first.value + ".." + modifier2 + second.value, t.target); } i++; tree.addChild(new ParseTree(slice, fileOptions)); constructCount.peek().incrementAndGet(); continue; } catch (ConfigRuntimeException ex) { //CSlice can throw CREs, but at this stage, we have to //turn them into a CCE. 
throw new ConfigCompileException(ex); } } else if(t.type == TType.LIT) { Construct c = Static.resolveConstruct(t.val(), t.target); if(c instanceof CString && fileOptions.isStrict()) { compilerErrors.add(new ConfigCompileException("Bare strings are not allowed in strict mode", t.target)); } else if((c instanceof CInt || c instanceof CDecimal) && next1.type == TType.DOT && next2.type == TType.LIT) { // make CDouble/CDecimal here because otherwise Long.parseLong() will remove // minus zero before decimals and leading zeroes after decimals try { if(t.value.startsWith("0m")) { // CDecimal String neg = ""; if(prev1.value.equals("-")) { neg = "-"; } c = new CDecimal(neg + t.value.substring(2) + '.' + next2.value, t.target); } else { // CDouble c = new CDouble(Double.parseDouble(t.val() + '.' + next2.val()), t.target); } i += 2; } catch (NumberFormatException e) { // Not a double } } tree.addChild(new ParseTree(c, fileOptions)); constructCount.peek().incrementAndGet(); } else if(t.type.equals(TType.STRING) || t.type.equals(TType.COMMAND)) { tree.addChild(new ParseTree(new CString(t.val(), t.target), fileOptions)); constructCount.peek().incrementAndGet(); } else if(t.type.equals(TType.IDENTIFIER)) { tree.addChild(new ParseTree(new CPreIdentifier(t.val(), t.target), fileOptions)); constructCount.peek().incrementAndGet(); } else if(t.type.isKeyword()) { tree.addChild(new ParseTree(new CKeyword(t.val(), t.getTarget()), fileOptions)); constructCount.peek().incrementAndGet(); } else if(t.type.equals(TType.IVARIABLE)) { tree.addChild(new ParseTree(new IVariable(t.val(), t.target), fileOptions)); constructCount.peek().incrementAndGet(); } else if(t.type.equals(TType.UNKNOWN)) { tree.addChild(new ParseTree(Static.resolveConstruct(t.val(), t.target), fileOptions)); constructCount.peek().incrementAndGet(); } else if(t.type.isSymbol()) { //Logic and math symbols // Attempt to find "-@var" and change it to "neg(@var)" if it's not @a - @b. Else just add the symbol. // Also handles "-function()" and "-@var[index]". if(t.type.equals(TType.MINUS) && !prev1.type.isAtomicLit() && !prev1.type.equals(TType.IVARIABLE) && !prev1.type.equals(TType.VARIABLE) && !prev1.type.equals(TType.RCURLY_BRACKET) && !prev1.type.equals(TType.RSQUARE_BRACKET) && !prev1.type.equals(TType.FUNC_END) && (next1.type.equals(TType.IVARIABLE) || next1.type.equals(TType.VARIABLE) || next1.type.equals(TType.FUNC_NAME))) { // Check if we are negating a value from an array, function or variable. if(next2.type.equals(TType.LSQUARE_BRACKET)) { minusArrayStack.push(new AtomicInteger(arrayStack.size() + 1)); // +1 because the bracket isn't counted yet. } else if(next1.type.equals(TType.FUNC_NAME)) { minusFuncStack.push(new AtomicInteger(parens + 1)); // +1 because the function isn't counted yet. } else { ParseTree negTree = new ParseTree(new CFunction("neg", unknown), fileOptions); negTree.addChild(new ParseTree(new IVariable(next1.value, next1.target), fileOptions)); tree.addChild(negTree); constructCount.peek().incrementAndGet(); i++; // Skip the next variable as we've just handled it. } } else { tree.addChild(new ParseTree(new CSymbol(t.val(), t.type, t.target), fileOptions)); constructCount.peek().incrementAndGet(); } } else if(t.type == TType.DOT) { // Check for doubles that start with a decimal, otherwise concat Construct c = null; if(next1.type == TType.LIT && prev1.type != TType.STRING && prev1.type != TType.SMART_STRING) { try { c = new CDouble(Double.parseDouble('.' 
								+ next1.val()), t.target);
						i++;
					} catch (NumberFormatException e) {
						// Not a double
					}
				}
				if(c == null) {
					c = new CSymbol(".", TType.CONCAT, t.target);
				}
				tree.addChild(new ParseTree(c, fileOptions));
				constructCount.peek().incrementAndGet();
			} else if(t.type.equals(TType.VARIABLE) || t.type.equals(TType.FINAL_VAR)) {
				tree.addChild(new ParseTree(new Variable(t.val(), null, false, t.type.equals(TType.FINAL_VAR), t.target), fileOptions));
				constructCount.peek().incrementAndGet();
				//right_vars.add(new Variable(t.val(), null, t.line_num));
			}
		}
		assert t != null;

		// Handle mismatching square brackets "[]".
		assert arrayStack.size() != 0 : "The last element of arrayStack should be present, but it was popped.";
		if(arrayStack.size() != 1) {
			// Some starting square bracket '[' was not closed at the end of the script.
			// Find the last '[' that was not closed and use that as target instead of the last line of the script.
			Target target = traceMismatchedOpenToken(stream, TType.LSQUARE_BRACKET, TType.RSQUARE_BRACKET);
			assert target != null : "Mismatched bracket was detected, but target-finding code could not find it.";
			if(target == null) {
				target = t.target;
			}
			// Throw a CCE.
			throw new ConfigCompileException("Mismatched square brackets", target);
		}

		// Handle mismatching parentheses "()".
		if(parens != 0) {
			// Some starting parentheses '(' was not closed at the end of the script.
			// Find the last '(' that was not closed and use that as target instead of the last line of the script.
			Target target = traceMismatchedOpenToken(stream, TType.FUNC_START, TType.FUNC_END);
			assert target != null : "Mismatched parentheses were detected, but target-finding code could not find it.";
			if(target == null) {
				target = t.target;
			}
			// Throw a CCE.
			throw new ConfigCompileException("Mismatched parentheses", target);
		}

		// Handle mismatching curly braces "{}".
		if(braceCount != 0) {
			// Some starting curly brace '{' was not closed at the end of the script.
			// Find the last '{' that was not closed and use that as target instead of the last line of the script.
			Target target = traceMismatchedOpenToken(stream, TType.LCURLY_BRACKET, TType.RCURLY_BRACKET);
			assert target != null : "Mismatched curly brace was detected, but target-finding code could not find it.";
			if(target == null) {
				target = t.target;
			}
			// Throw a CCE.
			throw new ConfigCompileException("Mismatched curly braces", target);
		}

		Stack<List<Procedure>> procs = new Stack<>();
		procs.add(new ArrayList<Procedure>());
		processKeywords(tree);
		optimizeAutoconcats(tree, compilerErrors);
		optimize(tree, procs, compilerErrors);
		link(tree, compilerErrors);
		checkLabels(tree, compilerErrors);
		checkBreaks(tree, compilerErrors);
		if(!compilerErrors.isEmpty()) {
			if(compilerErrors.size() == 1) {
				// Just throw the one CCE
				for(ConfigCompileException e : compilerErrors) {
					throw e;
				}
			} else {
				throw new ConfigCompileGroupException(compilerErrors);
			}
		}
		parents.pop();
		tree = parents.pop();
		return tree;
	}

	/**
	 * Trace target of mismatching open tokens such as '(' in '()' or '{' in '{}'. This should be used when it is
	 * known that there are more start than close tokens, but no target is known for the extra start token.
	 * @param stream - The token stream to scan.
	 * @param openType - The open type, which would be {@link TType#FUNC_START (} for a parentheses check.
	 * @param closeType - The close type, which would be {@link TType#FUNC_END )} for a parentheses check.
	 * @return The target of the last occurrence of the opening type that did not have a matching closing type.
	 * Returns null if no target was found.
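	 *
	 * <p>Illustrative example (assumed input): for the token stream of {@code func(a, (b)}, the inner pair of
	 * parentheses is balanced, so the target of the first {@code (} is returned.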
*/ private static Target traceMismatchedOpenToken(TokenStream stream, TType openType, TType closeType) { // Some starting parentheses '(' was not closed at the end of the script. // Find the last '(' that was not closed and use that as target instead of the last line of the script. Iterator<Token> iterator = stream.descendingIterator(); int closingCount = 0; while(iterator.hasNext()) { Token token = iterator.next(); if(token.type == closeType) { closingCount++; } else if(token.type == openType) { if(closingCount <= 0) { return token.target; } closingCount--; } } return null; } /** * Recurses down the tree and ensures that breaks don't bubble up past procedures or the root code tree. * * @param tree * @throws ConfigCompileException */ private static void checkBreaks(ParseTree tree, Set<ConfigCompileException> compilerExceptions) { checkBreaks0(tree, 0, null, compilerExceptions); } private static void checkBreaks0(ParseTree tree, long currentLoops, String lastUnbreakable, Set<ConfigCompileException> compilerErrors) { if(!(tree.getData() instanceof CFunction)) { //Don't care about these return; } if(tree.getData().val().startsWith("_")) { //It's a proc. We need to recurse, but not check this "function" for(ParseTree child : tree.getChildren()) { checkBreaks0(child, currentLoops, lastUnbreakable, compilerErrors); } return; } Function func; try { func = ((CFunction) tree.getData()).getFunction(); } catch (ConfigCompileException ex) { compilerErrors.add(ex); return; } if(func.getClass().getAnnotation(nolinking.class) != null) { // Don't link here return; } // We have special handling for procs and closures, and of course break and the loops. // If any of these are here, we kick into special handling mode. Otherwise, we recurse. if(func instanceof DataHandling._break) { // First grab the counter in the break function. If the break function doesn't // have any children, then 1 is implied. break() requires the argument to be // a CInt, so if it weren't, there would already have been a compile error, so // we can assume it will be a CInt. long breakCounter = 1; if(tree.getChildren().size() == 1) { breakCounter = ((CInt) tree.getChildAt(0).getData()).getInt(); } if(breakCounter > currentLoops) { // Throw an exception, as this would break above a loop. Different error messages // are applied to different cases if(currentLoops == 0) { compilerErrors.add(new ConfigCompileException("The break() function can only break out of loops" + (lastUnbreakable == null ? "." : ", but an attempt to break out of a " + lastUnbreakable + " was detected."), tree.getTarget())); } else { compilerErrors.add(new ConfigCompileException("Too many breaks" + " detected. Check your loop nesting, and set the break count to an appropriate value.", tree.getTarget())); } } return; } if(func.getClass().getAnnotation(unbreakable.class) != null) { // Parse the children like normal, but reset the counter to 0. for(ParseTree child : tree.getChildren()) { checkBreaks0(child, 0, func.getName(), compilerErrors); } return; } if(func.getClass().getAnnotation(breakable.class) != null) { // Don't break yet, still recurse, but up our current loops counter. currentLoops++; } for(ParseTree child : tree.getChildren()) { checkBreaks0(child, currentLoops, lastUnbreakable, compilerErrors); } } /** * Optimizing __autoconcat__ out should happen early, and should happen regardless of whether or not optimizations * are on or off. 
So this is broken off into a separate optimization procedure, so that the intricacies of the * normal optimizations don't apply to __autoconcat__. * * @param root * @param compilerExceptions */ private static void optimizeAutoconcats(ParseTree root, Set<ConfigCompileException> compilerExceptions) { for(ParseTree child : root.getChildren()) { if(child.hasChildren()) { optimizeAutoconcats(child, compilerExceptions); } } if(root.getData() instanceof CFunction && root.getData().val().equals(__autoconcat__)) { try { ParseTree ret = ((Compiler.__autoconcat__) ((CFunction) root.getData()).getFunction()).optimizeDynamic(root.getTarget(), root.getChildren(), root.getFileOptions()); root.setData(ret.getData()); root.setChildren(ret.getChildren()); } catch (ConfigCompileException ex) { compilerExceptions.add(ex); } } } /** * Recurses down the tree and ensures that there are no dynamic labels. This has to finish completely after * optimization, because the optimizer has no good hook to know when optimization for a unit is fully completed, * until ALL units are fully complete, so this happens separately after optimization, but as apart of the normal * compile process. * * @param tree * @throws ConfigCompileException */ private static void checkLabels(ParseTree tree, Set<ConfigCompileException> compilerErrors) throws ConfigCompileException { // for(ParseTree t : tree.getChildren()){ // if(t.getData() instanceof CLabel){ // if(((CLabel)t.getData()).cVal() instanceof IVariable){ // throw new ConfigCompileException("Variables may not be used as labels", t.getTarget()); // } // } // checkLabels(t); // } } /** * Recurses down the tree and * <ul><li>Links functions</li> * <li>Checks function arguments</li></ul> * This is a separate process from optimization, because optimization ignores any missing functions. * * @param tree */ private static void link(ParseTree tree, Set<ConfigCompileException> compilerErrors) { FunctionBase treeFunction = null; try { treeFunction = FunctionList.getFunction(tree.getData()); if(treeFunction.getClass().getAnnotation(nolinking.class) != null) { //Don't link children of a nolinking function. return; } } catch (ConfigCompileException ex) { //This can happen if the treeFunction isn't a function, is a proc, etc, //but we don't care, we just want to continue. } // Check the argument count, and do any custom linking the function may have if(treeFunction != null) { Integer[] numArgs = treeFunction.numArgs(); if(!Arrays.asList(numArgs).contains(Integer.MAX_VALUE) && !Arrays.asList(numArgs).contains(tree.getChildren().size())) { compilerErrors.add(new ConfigCompileException("Incorrect number of arguments passed to " + tree.getData().val(), tree.getData().getTarget())); } if(treeFunction instanceof Optimizable) { Optimizable op = (Optimizable) treeFunction; if(op.optimizationOptions().contains(OptimizationOption.CUSTOM_LINK)) { try { op.link(tree.getData().getTarget(), tree.getChildren()); } catch (ConfigCompileException ex) { compilerErrors.add(ex); } } } } // Walk the children for(ParseTree child : tree.getChildren()) { if(child.getData() instanceof CFunction) { if(child.getData().val().charAt(0) != '_' || child.getData().val().charAt(1) == '_') { // This will throw an exception if the function doesn't exist. try { FunctionList.getFunction(child.getData()); } catch (ConfigCompileException ex) { compilerErrors.add(ex); } } link(child, compilerErrors); } } } @SuppressWarnings("checkstyle:constantname") // Variable is more clear when named after the function it represents. 
	private static final String __autoconcat__ = new Compiler.__autoconcat__().getName();

	/**
	 * Recurses down into the tree, attempting to optimize where possible. A few things have strong coupling, for
	 * information on these items, see the documentation included in the source.
	 *
	 * @param tree
	 * @return
	 */
	private static void optimize(ParseTree tree, Stack<List<Procedure>> procs, Set<ConfigCompileException> compilerErrors) {
		if(tree.isOptimized()) {
			return; //Don't need to re-run this
		}
//		if(tree.getData() instanceof CIdentifier) {
//			optimize(((CIdentifier) tree.getData()).contained(), procs);
//			return;
//		}
		if(!(tree.getData() instanceof CFunction)) {
			//There's no way to optimize something that's not a function
			return;
		}
		//If it is a proc definition, we need to go ahead and see if we can add it to the const proc stack
		if(tree.getData().val().equals("proc")) {
			procs.push(new ArrayList<Procedure>());
		}
		CFunction cFunction = (CFunction) tree.getData();
		Function func;
		try {
			func = (Function) FunctionList.getFunction(cFunction);
		} catch (ConfigCompileException e) {
			func = null;
		}
		if(func != null) {
			if(func.getClass().getAnnotation(nolinking.class) != null) {
				//It's an unlinking function, so we need to stop at this point
				return;
			}
		}
		if(cFunction instanceof CIdentifier) {
			//Add the child to the identifier
			ParseTree c = ((CIdentifier) cFunction).contained();
			tree.addChild(c);
			c.getData().setWasIdentifier(true);
		}
		List<ParseTree> children = tree.getChildren();
		if(func instanceof Optimizable && ((Optimizable) func).optimizationOptions().contains(OptimizationOption.PRIORITY_OPTIMIZATION)) {
			// This is a priority optimization function, meaning it needs to be optimized before its children are.
			// This is required when optimization of the children could cause different internal behavior, for instance
			// if this function is expecting the presence of some code element, but the child gets optimized out, this
			// would cause an error, even though the user did in fact provide code in that section.
			try {
				((Optimizable) func).optimizeDynamic(tree.getTarget(), children, tree.getFileOptions());
			} catch (ConfigCompileException ex) {
				// If an error occurs, we will skip the rest of this element
				compilerErrors.add(ex);
				return;
			} catch (ConfigRuntimeException ex) {
				compilerErrors.add(new ConfigCompileException(ex));
				return;
			}
		}
		//Loop through the children, and if any of them are functions that are terminal, truncate.
		//To explain this further, consider the following:
		//For the code: concat(die(), msg('')), this diagram shows the abstract syntax tree:
		//        (concat)
		//         /    \
		//        /      \
		//     (die)    (msg)
		//By looking at the code, we can tell that msg() will never be called, because die() will run first,
		//and since it is a "terminal" function, any code after it will NEVER run. However, consider a more complex condition:
		// if(@input){ die() msg('1') } else { msg('2') msg('3') }
		//                  if(@input)
		//             [true]/        \[false]
		//                  /          \
		//           (sconcat)        (sconcat)
		//            /    \           /    \
		//           /      \         /      \
		//        (die)  (msg[1]) (msg[2]) (msg[3])
		//In this case, only msg('1') is guaranteed not to run, msg('2') and msg('3') will still run in some cases.
		//So, we can optimize out msg('1') in this case, which would cause the tree to become much simpler, therefore a worthwhile optimization:
		//                  if(@input)
		//             [true]/        \[false]
		//                  /          \
		//               (die)       (sconcat)
		//                            /    \
		//                           /      \
		//                      (msg[2])  (msg[3])
		//We do have to be careful though, because of functions like if, which actually work like this:
		//if(@var){ die() } else { msg('') }
		//        (if)
		//       /  |  \
		//      /   |   \
		//  @var  (die)  (msg)
		//We can't get rid of the msg() here, because it is actually in another branch.
		//For the time being, we will simply say that if a function uses execs, it
		//is a branch (branches always use execs, though using execs doesn't strictly
		//mean you are a branch type function).
		for(int i = 0; i < children.size(); i++) {
			ParseTree t = children.get(i);
			if(t.getData() instanceof CFunction) {
				if(t.getData().val().startsWith("_") || (func != null && func.useSpecialExec())) {
					continue;
				}
				Function f;
				try {
					f = (Function) FunctionList.getFunction(t.getData());
				} catch (ConfigCompileException ex) {
					continue;
				}
				Set<OptimizationOption> options = NO_OPTIMIZATIONS;
				if(f instanceof Optimizable) {
					options = ((Optimizable) f).optimizationOptions();
				}
				if(options.contains(OptimizationOption.TERMINAL)) {
					if(children.size() > i + 1) {
						//First, a compiler warning
						CHLog.GetLogger().Log(CHLog.Tags.COMPILER, LogLevel.WARNING,
								"Unreachable code. Consider removing this code.", children.get(i + 1).getTarget());
						//Now, truncate the children
						for(int j = children.size() - 1; j > i; j--) {
							children.remove(j);
						}
						break;
					}
				}
			}
		}
		boolean fullyStatic = true;
		boolean hasIVars = false;
		for(ParseTree node : children) {
			if(node.getData() instanceof CFunction) {
				optimize(node, procs, compilerErrors);
			}
			if(node.getData().isDynamic() && !(node.getData() instanceof IVariable)) {
				fullyStatic = false;
			}
			if(node.getData() instanceof IVariable) {
				hasIVars = true;
			}
		}
		//In all cases, at this point, we are either unable to optimize, or we will
		//optimize, so set our optimized variable at this point.
		tree.setOptimized(true);

		if(func == null) {
			//It's a proc call. Let's see if we can optimize it
			Procedure p = null;
			loop:
			for(List<Procedure> proc : procs) {
				for(Procedure pp : proc) {
					if(pp.getName().equals(cFunction.val())) {
						p = pp;
						break loop;
					}
				}
			}
			if(p != null) {
				try {
					Construct c = DataHandling.proc.optimizeProcedure(p.getTarget(), p, children);
					if(c != null) {
						tree.setData(c);
						tree.removeChildren();
						return;
					} //else Nope, couldn't optimize.
				} catch (ConfigRuntimeException ex) {
					//Cool. Caught a runtime error at compile time :D
					compilerErrors.add(new ConfigCompileException(ex));
				}
			}
			//else this procedure isn't listed yet. Maybe a compiler error, maybe not, depends,
			//so we can't for sure say, but we do know we can't optimize this
			return;
		}
		if(tree.getData().val().equals("proc")) {
			//Check for too few arguments
			if(children.size() < 2) {
				compilerErrors.add(new ConfigCompileException("Incorrect number of arguments passed to proc",
						tree.getData().getTarget()));
				return;
			}
			//We just went out of scope, so we need to pop the layer of Procedures that
			//are internal to us
			procs.pop();
			//However, as a special function, we *might* be able to get a const proc out of this
			//Let's see.
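			// Illustrative example (an assumption, not from the original comments): a definition such as
			// proc _add(@a, @b) { return(@a + @b) } can be captured here, so that a later call like _add(1, 2)
			// with constant arguments may be folded at compile time.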
try { ParseTree root = new ParseTree(new CFunction(__autoconcat__, Target.UNKNOWN), tree.getFileOptions()); Script fakeScript = Script.GenerateScript(root, "*"); Environment env = null; try { if(Implementation.GetServerType().equals(Implementation.Type.BUKKIT)) { CommandHelperPlugin plugin = CommandHelperPlugin.self; GlobalEnv gEnv = new GlobalEnv(plugin.executionQueue, plugin.profiler, plugin.persistenceNetwork, MethodScriptFileLocations.getDefault().getConfigDirectory(), plugin.profiles, new TaskManager()); env = Environment.createEnvironment(gEnv, new CommandHelperEnvironment()); } else { env = Static.GenerateStandaloneEnvironment(false); } } catch (IOException | DataSourceException | URISyntaxException | Profiles.InvalidProfileException e) { // } Procedure myProc = DataHandling.proc.getProcedure(tree.getTarget(), env, fakeScript, children.toArray(new ParseTree[children.size()])); procs.peek().add(myProc); //Yep. So, we can move on with our lives now, and if it's used later, it could possibly be static. } catch (ConfigRuntimeException e) { //Well, they have an error in there somewhere compilerErrors.add(new ConfigCompileException(e)); } catch (NullPointerException e) { //Nope, can't optimize. return; } } //the compiler trick functions know how to deal with it specially, even if everything isn't //static, so do this first. String oldFunctionName = func.getName(); Set<OptimizationOption> options = NO_OPTIMIZATIONS; if(func instanceof Optimizable) { options = ((Optimizable) func).optimizationOptions(); } if(options.contains(OptimizationOption.OPTIMIZE_DYNAMIC)) { try { ParseTree tempNode; try { tempNode = ((Optimizable) func).optimizeDynamic(tree.getData().getTarget(), tree.getChildren(), tree.getFileOptions()); } catch (ConfigRuntimeException e) { //Turn it into a compile exception, then rethrow throw new ConfigCompileException(e); } if(tempNode == Optimizable.PULL_ME_UP) { if(tree.hasChildren()) { tempNode = tree.getChildAt(0); } else { tempNode = null; } } if(tempNode == Optimizable.REMOVE_ME) { tree.setData(new CFunction("p", Target.UNKNOWN)); tree.removeChildren(); } else if(tempNode != null) { tree.setData(tempNode.getData()); tree.setOptimized(tempNode.isOptimized()); tree.setChildren(tempNode.getChildren()); tree.getData().setWasIdentifier(tempNode.getData().wasIdentifier()); optimize(tree, procs, compilerErrors); tree.setOptimized(true); //Some functions can actually make static the arguments, for instance, by pulling up a hardcoded //array, so if they have reversed this, make note of that now if(tempNode.hasBeenMadeStatic()) { fullyStatic = true; } } //else it wasn't an optimization, but a compile check } catch (ConfigCompileException ex) { compilerErrors.add(ex); } } if(!fullyStatic) { return; } //Otherwise, everything is static, or an IVariable and we can proceed. //Note since we could still have IVariables, we have to handle those //specially from here forward if(func.preResolveVariables() && hasIVars) { //Well, this function isn't equipped to deal with IVariables. 
return; } //It could have optimized by changing the name, in that case, we //don't want to run this now if(tree.getData().getValue().equals(oldFunctionName) && (options.contains(OptimizationOption.OPTIMIZE_CONSTANT) || options.contains(OptimizationOption.CONSTANT_OFFLINE))) { Construct[] constructs = new Construct[tree.getChildren().size()]; for(int i = 0; i < tree.getChildren().size(); i++) { constructs[i] = tree.getChildAt(i).getData(); } try { try { Construct result; if(options.contains(OptimizationOption.CONSTANT_OFFLINE)) { List<Integer> numArgsList = Arrays.asList(func.numArgs()); if(!numArgsList.contains(Integer.MAX_VALUE) && !numArgsList.contains(tree.getChildren().size())) { compilerErrors.add(new ConfigCompileException("Incorrect number of arguments passed to " + tree.getData().val(), tree.getData().getTarget())); result = null; } else { result = func.exec(tree.getData().getTarget(), null, constructs); } } else { result = ((Optimizable) func).optimize(tree.getData().getTarget(), constructs); } //If the result is null, it was just a check, it can't optimize further. if(result != null) { result.setWasIdentifier(tree.getData().wasIdentifier()); tree.setData(result); tree.removeChildren(); } } catch (ConfigRuntimeException e) { //Turn this into a ConfigCompileException, then rethrow throw new ConfigCompileException(e); } } catch (ConfigCompileException ex) { compilerErrors.add(ex); } } //It doesn't know how to optimize. Oh well. } /** * Runs keyword processing on the tree. Note that this is run before optimization, and is a depth first process. * * @param tree */ private static void processKeywords(ParseTree tree) throws ConfigCompileException { // Keyword processing List<ParseTree> children = tree.getChildren(); for(int i = 0; i < children.size(); i++) { ParseTree node = children.get(i); // Keywords can be standalone, or a function can double as a keyword. So we have to check for both // conditions. processKeywords(node); if(node.getData() instanceof CKeyword || (node.getData() instanceof CLabel && ((CLabel) node.getData()).cVal() instanceof CKeyword) || (node.getData() instanceof CFunction && KeywordList.getKeywordByName(node.getData().val()) != null)) { // This looks a bit confusing, but is fairly straightforward. We want to process the child elements of all // remaining nodes, so that subchildren that need processing will be finished, and our current tree level will // be able to independently process it. We don't want to process THIS level though, just the children of this level. for(int j = i + 1; j < children.size(); j++) { processKeywords(children.get(j)); } // Now that all the children of the rest of the chain are processed, we can do the processing of this level. i = KeywordList.getKeywordByName(node.getData().val()).process(children, i); } } } /** * Shorthand for lexing, compiling, and executing a script. * * @param script The textual script to execute * @param file The file it was located in * @param inPureMScript If it is pure MScript, or aliases * @param env The execution environment * @param done The MethodScriptComplete callback (may be null) * @param s A script object (may be null) * @param vars Any $vars (may be null) * @return * @throws ConfigCompileException * @throws com.laytonsmith.core.exceptions.ConfigCompileGroupException This indicates that a group of compile errors * occurred. 
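	 *
	 * <p>Illustrative usage (a sketch; the script text is an assumption, and done, s and vars may be null as
	 * documented above):
	 * <pre>{@code
	 * Construct result = MethodScriptCompiler.execute("msg('Hello World')", null, true, env, null, null, null);
	 * }</pre>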
*/ public static Construct execute(String script, File file, boolean inPureMScript, Environment env, MethodScriptComplete done, Script s, List<Variable> vars) throws ConfigCompileException, ConfigCompileGroupException { return execute(compile(lex(script, file, inPureMScript)), env, done, s, vars); } /** * Executes a pre-compiled MethodScript, given the specified Script environment. Both done and script may be null, * and if so, reasonable defaults will be provided. The value sent to done will also be returned, as a Construct, so * this one function may be used synchronously also. * * @param root * @param env * @param done * @param script * @return */ public static Construct execute(ParseTree root, Environment env, MethodScriptComplete done, Script script) { return execute(root, env, done, script, null); } /** * Executes a pre-compiled MethodScript, given the specified Script environment, but also provides a method to set * the constants in the script. * * @param root * @param env * @param done * @param script * @param vars * @return */ public static Construct execute(ParseTree root, Environment env, MethodScriptComplete done, Script script, List<Variable> vars) { if(root == null) { return CVoid.VOID; } if(script == null) { script = new Script(null, null, env.getEnv(GlobalEnv.class).GetLabel(), new FileOptions(new HashMap<>())); } if(vars != null) { Map<String, Variable> varMap = new HashMap<>(); for(Variable v : vars) { varMap.put(v.getVariableName(), v); } for(Construct tempNode : root.getAllData()) { if(tempNode instanceof Variable) { Variable vv = varMap.get(((Variable) tempNode).getVariableName()); if(vv != null) { ((Variable) tempNode).setVal(vv.getDefault()); } else { //The variable is unset. I'm not quite sure what cases would cause this ((Variable) tempNode).setVal(""); } } } } StringBuilder b = new StringBuilder(); Construct returnable = null; for(ParseTree gg : root.getChildren()) { Construct retc = script.eval(gg, env); if(root.numberOfChildren() == 1) { returnable = retc; } String ret = retc instanceof CNull ? "null" : retc.val(); if(ret != null && !ret.trim().isEmpty()) { b.append(ret).append(" "); } } if(done != null) { done.done(b.toString().trim()); } if(returnable != null) { return returnable; } return Static.resolveConstruct(b.toString().trim(), Target.UNKNOWN); } public static void registerAutoIncludes(Environment env, Script s) { for(File f : Static.getAliasCore().autoIncludes) { try { MethodScriptCompiler.execute(IncludeCache.get(f, new Target(0, f, 0)), env, null, s); } catch (ProgramFlowManipulationException e) { ConfigRuntimeException.HandleUncaughtException(ConfigRuntimeException.CreateUncatchableException("Cannot break program flow in auto include files.", e.getTarget()), env); } catch (ConfigRuntimeException e) { e.setEnv(env); ConfigRuntimeException.HandleUncaughtException(e, env); } } } }
src/main/java/com/laytonsmith/core/MethodScriptCompiler.java
package com.laytonsmith.core; import com.laytonsmith.abstraction.Implementation; import com.laytonsmith.annotations.breakable; import com.laytonsmith.annotations.nolinking; import com.laytonsmith.annotations.unbreakable; import com.laytonsmith.commandhelper.CommandHelperPlugin; import com.laytonsmith.core.Optimizable.OptimizationOption; import com.laytonsmith.core.compiler.FileOptions; import com.laytonsmith.core.compiler.KeywordList; import com.laytonsmith.core.compiler.TokenStream; import com.laytonsmith.core.constructs.CDecimal; import com.laytonsmith.core.constructs.CDouble; import com.laytonsmith.core.constructs.CFunction; import com.laytonsmith.core.constructs.CIdentifier; import com.laytonsmith.core.constructs.CInt; import com.laytonsmith.core.constructs.CKeyword; import com.laytonsmith.core.constructs.CLabel; import com.laytonsmith.core.constructs.CNull; import com.laytonsmith.core.constructs.CPreIdentifier; import com.laytonsmith.core.constructs.CSlice; import com.laytonsmith.core.constructs.CString; import com.laytonsmith.core.constructs.CSymbol; import com.laytonsmith.core.constructs.CVoid; import com.laytonsmith.core.constructs.Construct; import com.laytonsmith.core.constructs.IVariable; import com.laytonsmith.core.constructs.Target; import com.laytonsmith.core.constructs.Token; import com.laytonsmith.core.constructs.Token.TType; import com.laytonsmith.core.constructs.Variable; import com.laytonsmith.core.environments.CommandHelperEnvironment; import com.laytonsmith.core.environments.Environment; import com.laytonsmith.core.environments.GlobalEnv; import com.laytonsmith.core.exceptions.ConfigCompileException; import com.laytonsmith.core.exceptions.ConfigCompileGroupException; import com.laytonsmith.core.exceptions.ConfigRuntimeException; import com.laytonsmith.core.exceptions.ProgramFlowManipulationException; import com.laytonsmith.core.functions.Compiler; import com.laytonsmith.core.functions.DataHandling; import com.laytonsmith.core.functions.Function; import com.laytonsmith.core.functions.FunctionBase; import com.laytonsmith.core.functions.FunctionList; import com.laytonsmith.core.functions.IncludeCache; import com.laytonsmith.core.taskmanager.TaskManager; import com.laytonsmith.persistence.DataSourceException; import java.io.File; import java.io.IOException; import java.net.URISyntaxException; import java.util.ArrayList; import java.util.Arrays; import java.util.EmptyStackException; import java.util.EnumSet; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.ListIterator; import java.util.Map; import java.util.NoSuchElementException; import java.util.Set; import java.util.Stack; import java.util.concurrent.atomic.AtomicInteger; import java.util.regex.Pattern; /** * The MethodScriptCompiler class handles the various stages of compilation and provides helper methods for execution of * the compiled trees. */ public final class MethodScriptCompiler { private static final EnumSet<Optimizable.OptimizationOption> NO_OPTIMIZATIONS = EnumSet.noneOf(Optimizable.OptimizationOption.class); private MethodScriptCompiler() { } private static final Pattern VAR_PATTERN = Pattern.compile("\\$[\\p{L}0-9_]+"); private static final Pattern IVAR_PATTERN = Pattern.compile(IVariable.VARIABLE_NAME_REGEX); /** * Lexes the script, and turns it into a token stream. This looks through the script character by character. 
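	 *
	 * <p>Illustrative usage (a sketch; the script text and the null file argument are assumptions):
	 * <pre>{@code
	 * TokenStream tokens = MethodScriptCompiler.lex("msg('Hello World')", null, true);
	 * }</pre>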
* * @param script The script to lex * @param file The file this script came from, or potentially null if the code is from a dynamic source * @param inPureMScript If the script is in pure MethodScript, this should be true. Pure MethodScript is defined as * code that doesn't have command alias wrappers. * @return A stream of tokens * @throws ConfigCompileException If compilation fails due to bad syntax */ public static TokenStream lex(String script, File file, boolean inPureMScript) throws ConfigCompileException { return lex(script, file, inPureMScript, false); } /** * Lexes the script, and turns it into a token stream. This looks through the script character by character. * * @param script The script to lex * @param file The file this script came from, or potentially null if the code is from a dynamic source * @param inPureMScript If the script is in pure MethodScript, this should be true. Pure MethodScript is defined as * code that doesn't have command alias wrappers. * @param saveAllTokens If this script is planning to be compiled, then this value should always be false, however, * if the calling code needs all tokens for informational purposes (and doesn't plan on actually compiling the code) * then this can be true. If true, all tokens are saved, including comments and (some) whitespace. Given this lexing * stream, the exact source code could be re-constructed. * * A note on whitespace: The whitespace tokens are not guaranteed to be accurate, however, the column information * is. If you have two tokens t1 and t2, each with a value of length 1, where the columns are 1 and 5, then that * means there are 4 spaces between the two. * @return A stream of tokens * @throws ConfigCompileException If compilation fails due to bad syntax */ public static TokenStream lex(String script, File file, boolean inPureMScript, boolean saveAllTokens) throws ConfigCompileException { if(script.isEmpty()) { return new TokenStream(new LinkedList<>(), ""); } if((int) script.charAt(0) == 65279) { // Remove the UTF-8 Byte Order Mark, if present. script = script.substring(1); } final StringBuilder fileOptions = new StringBuilder(); script = script.replaceAll("\r\n", "\n"); script = script + "\n"; final Set<String> keywords = KeywordList.getKeywordNames(); final TokenStream tokenList = new TokenStream(); // Set our state variables. boolean stateInQuote = false; int quoteLineNumberStart = 1; boolean inSmartQuote = false; int smartQuoteLineNumberStart = 1; boolean inComment = false; int commentLineNumberStart = 1; boolean commentIsBlock = false; boolean inOptVar = false; boolean inCommand = (!inPureMScript); boolean inMultiline = false; boolean inSmartComment = false; boolean inFileOptions = false; int fileOptionsLineNumberStart = 1; StringBuilder buf = new StringBuilder(); int lineNum = 1; int column = 1; int lastColumn = 0; Target target = Target.UNKNOWN; // Lex the script character by character. for(int i = 0; i < script.length(); i++) { Character c = script.charAt(i); Character c2 = null; if(i < script.length() - 1) { c2 = script.charAt(i + 1); } column += i - lastColumn; lastColumn = i; if(c == '\n') { lineNum++; column = 1; if(!inMultiline && !inPureMScript) { inCommand = true; } } if(buf.length() == 0) { target = new Target(lineNum, file, column); } // If we are in file options, add the character to the buffer if it's not a file options end character. if(inFileOptions) { // For a '>' character outside of a comment, '\>' would have to be used in file options. // Other characters than '>'cannot be escaped. 
// If support for more escaped characters would be desired in the future, it could be added here. switch(c) { case '\\': { if(c2 == '>') { // "\>". fileOptions.append('>'); i++; continue; } break; } case '>': { if(saveAllTokens) { tokenList.add(new Token(TType.FILE_OPTIONS_STRING, fileOptions.toString(), target)); tokenList.add(new Token(TType.FILE_OPTIONS_END, ">", target)); } inFileOptions = false; continue; } } fileOptions.append(c); continue; } // Comment handling. This is bypassed if we are in a string. if(!stateInQuote && !inSmartQuote) { switch(c) { // Block comments start (/* and /**) and Double slash line comment start (//). case '/': { if(!inComment) { if(c2 == '*') { // "/*" or "/**". buf.append("/*"); inComment = true; commentIsBlock = true; if(i < script.length() - 2 && script.charAt(i + 2) == '*') { // "/**". inSmartComment = true; buf.append("*"); i++; } commentLineNumberStart = lineNum; i++; continue; } else if(c2 == '/') { // "//". buf.append("//"); inComment = true; i++; continue; } } break; } // Line comment start (#). case '#': { if(!inComment) { // "#". buf.append("#"); inComment = true; continue; } break; } // Block comment end (*/). case '*': { if(inComment && commentIsBlock && c2 == '/') { // "*/". if(saveAllTokens || inSmartComment) { buf.append("*/"); tokenList.add(new Token(inSmartComment ? TType.SMART_COMMENT : TType.COMMENT, buf.toString(), target)); } buf = new StringBuilder(); target = new Target(lineNum, file, column); inComment = false; commentIsBlock = false; inSmartComment = false; i++; continue; } break; } // Line comment end (\n). case '\n': { if(inComment && !commentIsBlock) { // "\n". inComment = false; if(saveAllTokens) { tokenList.add(new Token(TType.COMMENT, buf.toString(), target)); tokenList.add(new Token(TType.NEWLINE, "\n", new Target(lineNum + 1, file, 0))); } buf = new StringBuilder(); target = new Target(lineNum, file, column); continue; } break; } } } // If we are in a comment, add the character to the buffer. if(inComment) { buf.append(c); continue; } // Handle non-comment non-quoted characters. if(!stateInQuote) { // We're not in a comment or quoted string, handle: +=, -=, *=, /=, .=, ->, ++, --, %, **, *, +, -, /, // >=, <=, <<<, >>>, <, >, ===, !==, ==, !=, &&&, |||, &&, ||, !, {, }, .., ., ::, [, =, ], :, comma, // (, ), ;, and whitespace. matched: { Token token; switch(c) { case '+': { if(c2 == '=') { // "+=". token = new Token(TType.PLUS_ASSIGNMENT, "+=", target); i++; } else if(c2 == '+') { // "++". token = new Token(TType.INCREMENT, "++", target); i++; } else { // "+". token = new Token(TType.PLUS, "+", target); } break; } case '-': { if(c2 == '=') { // "-=". token = new Token(TType.MINUS_ASSIGNMENT, "-=", target); i++; } else if(c2 == '-') { // "--". token = new Token(TType.DECREMENT, "--", target); i++; } else if(c2 == '>') { // "->". token = new Token(TType.DEREFERENCE, "->", target); i++; } else { // "-". token = new Token(TType.MINUS, "-", target); } break; } case '*': { if(c2 == '=') { // "*=". token = new Token(TType.MULTIPLICATION_ASSIGNMENT, "*=", target); i++; } else if(c2 == '*') { // "**". token = new Token(TType.EXPONENTIAL, "**", target); i++; } else { // "*". token = new Token(TType.MULTIPLICATION, "*", target); } break; } case '/': { if(c2 == '=') { // "/=". token = new Token(TType.DIVISION_ASSIGNMENT, "/=", target); i++; } else { // "/". // Protect against matching commands. if(Character.isLetter(c2)) { break matched; // Pretend that division didn't match. 
} token = new Token(TType.DIVISION, "/", target); } break; } case '.': { if(c2 == '=') { // ".=". token = new Token(TType.CONCAT_ASSIGNMENT, ".=", target); i++; } else if(c2 == '.') { // "..". token = new Token(TType.SLICE, "..", target); i++; } else { // ".". token = new Token(TType.DOT, ".", target); } break; } case '%': { token = new Token(TType.MODULO, "%", target); break; } case '>': { if(c2 == '=') { // ">=". token = new Token(TType.GTE, ">=", target); i++; } else if(c2 == '>' && i < script.length() - 2 && script.charAt(i + 2) == '>') { // ">>>". token = new Token(TType.MULTILINE_START, ">>>", target); inMultiline = true; i += 2; } else { // ">". token = new Token(TType.GT, ">", target); } break; } case '<': { if(c2 == '!') { // "<!". if(buf.length() > 0) { tokenList.add(new Token(TType.UNKNOWN, buf.toString(), target)); buf = new StringBuilder(); target = new Target(lineNum, file, column); } if(saveAllTokens) { tokenList.add(new Token(TType.FILE_OPTIONS_START, "<!", target)); } inFileOptions = true; fileOptionsLineNumberStart = lineNum; i++; continue; } else if(c2 == '=') { // "<=". token = new Token(TType.LTE, "<=", target); i++; } else if(c2 == '<' && i < script.length() - 2 && script.charAt(i + 2) == '<') { // "<<<". token = new Token(TType.MULTILINE_END, "<<<", target); inMultiline = false; i += 2; } else { // "<". token = new Token(TType.LT, "<", target); } break; } case '=': { if(c2 == '=') { if(i < script.length() - 2 && script.charAt(i + 2) == '=') { // "===". token = new Token(TType.STRICT_EQUALS, "===", target); i += 2; } else { // "==". token = new Token(TType.EQUALS, "==", target); i++; } } else { // "=". if(inCommand) { if(inOptVar) { token = new Token(TType.OPT_VAR_ASSIGN, "=", target); } else { token = new Token(TType.ALIAS_END, "=", target); inCommand = false; } } else { token = new Token(TType.ASSIGNMENT, "=", target); } } break; } case '!': { if(c2 == '=') { if(i < script.length() - 2 && script.charAt(i + 2) == '=') { // "!==". token = new Token(TType.STRICT_NOT_EQUALS, "!==", target); i += 2; } else { // "!=". token = new Token(TType.NOT_EQUALS, "!=", target); i++; } } else { // "!". token = new Token(TType.LOGICAL_NOT, "!", target); } break; } case '&': { if(c2 == '&') { if(i < script.length() - 2 && script.charAt(i + 2) == '&') { // "&&&". token = new Token(TType.DEFAULT_AND, "&&&", target); i += 2; } else { // "&&". token = new Token(TType.LOGICAL_AND, "&&", target); i++; } } else { // "&". // Bitwise symbols are not used yet. break matched; // Pretend that bitwise AND didn't match. // token = new Token(TType.BIT_AND, "&", target); } break; } case '|': { if(c2 == '|') { if(i < script.length() - 2 && script.charAt(i + 2) == '|') { // "|||". token = new Token(TType.DEFAULT_OR, "|||", target); i += 2; } else { // "||". token = new Token(TType.LOGICAL_OR, "||", target); i++; } } else { // "|". // Bitwise symbols are not used yet. break matched; // Pretend that bitwise OR didn't match. // token = new Token(TType.BIT_OR, "|", target); } break; } // Bitwise symbols are not used yet. // case '^': { // token = new Token(TType.BIT_XOR, "^", target); // break; // } case ':': { if(c2 == ':') { // "::". token = new Token(TType.DEREFERENCE, "::", target); i++; } else { // ":". 
token = new Token(TType.LABEL, ":", target); } break; } case '{': { token = new Token(TType.LCURLY_BRACKET, "{", target); break; } case '}': { token = new Token(TType.RCURLY_BRACKET, "}", target); break; } case '[': { token = new Token(TType.LSQUARE_BRACKET, "[", target); inOptVar = true; break; } case ']': { token = new Token(TType.RSQUARE_BRACKET, "]", target); inOptVar = false; break; } case ',': { token = new Token(TType.COMMA, ",", target); break; } case ';': { token = new Token(TType.SEMICOLON, ";", target); break; } case '(': { token = new Token(TType.FUNC_START, "(", target); // Handle the buffer or previous token, with the knowledge that a FUNC_START follows. if(buf.length() > 0) { if(saveAllTokens) { // In this case, we need to check for keywords first, because we want to go ahead // and convert into that stage. In the future, we might want to do this // unconditionally, but for now, just go ahead and only do it if saveAllTokens is // true, because we know that won't be used by the compiler. if(KeywordList.getKeywordByName(buf.toString()) != null) { // It's a keyword. tokenList.add(new Token(TType.KEYWORD, buf.toString(), target)); } else { // It's not a keyword, but a normal function. tokenList.add(new Token(TType.FUNC_NAME, buf.toString(), target)); } } else { tokenList.add(new Token(TType.FUNC_NAME, buf.toString(), target)); } buf = new StringBuilder(); target = new Target(lineNum, file, column); } else { // The previous token, if unknown, should be changed to a FUNC_NAME. If it's not // unknown, we may be doing standalone parenthesis, so auto tack on the __autoconcat__ // function. try { int count = 0; Iterator<Token> it = tokenList.descendingIterator(); Token t; while((t = it.next()).type == TType.WHITESPACE) { count++; } if(t.type == TType.UNKNOWN) { t.type = TType.FUNC_NAME; // Go ahead and remove the whitespace here too, they break things. count--; for(int a = 0; a < count; a++) { tokenList.removeLast(); } } else { tokenList.add(new Token(TType.FUNC_NAME, "__autoconcat__", target)); } } catch (NoSuchElementException e) { // This is the first element on the list, so, it's another autoconcat. tokenList.add(new Token(TType.FUNC_NAME, "__autoconcat__", target)); } } break; } case ')': { token = new Token(TType.FUNC_END, ")", target); break; } case ' ': { // Whitespace case #1. token = new Token(TType.WHITESPACE, " ", target); break; } case '\t': { // Whitespace case #2 (TAB). token = new Token(TType.WHITESPACE, "\t", target); break; } default: { // No match was found at this point, so continue matching below. break matched; } } // Add previous characters as UNKNOWN token. if(buf.length() > 0) { tokenList.add(new Token(TType.UNKNOWN, buf.toString(), target)); buf = new StringBuilder(); target = new Target(lineNum, file, column); } // Add the new token to the token list. tokenList.add(token); // Continue lexing. continue; } } // Handle non-comment characters that might start or stop a quoted string. switch(c) { case '\'': { if(stateInQuote && !inSmartQuote) { tokenList.add(new Token(TType.STRING, buf.toString(), target)); buf = new StringBuilder(); target = new Target(lineNum, file, column); stateInQuote = false; continue; } else if(!stateInQuote) { stateInQuote = true; quoteLineNumberStart = lineNum; inSmartQuote = false; if(buf.length() > 0) { tokenList.add(new Token(TType.UNKNOWN, buf.toString(), target)); buf = new StringBuilder(); target = new Target(lineNum, file, column); } continue; } else { // We're in a smart quote. 
buf.append("'"); } break; } case '"': { if(stateInQuote && inSmartQuote) { tokenList.add(new Token(TType.SMART_STRING, buf.toString(), target)); buf = new StringBuilder(); target = new Target(lineNum, file, column); stateInQuote = false; inSmartQuote = false; continue; } else if(!stateInQuote) { stateInQuote = true; inSmartQuote = true; smartQuoteLineNumberStart = lineNum; if(buf.length() > 0) { tokenList.add(new Token(TType.UNKNOWN, buf.toString(), target)); buf = new StringBuilder(); target = new Target(lineNum, file, column); } continue; } else { // We're in normal quotes. buf.append('"'); } break; } case '\n': { // Append a newline to the buffer if it's quoted. if(stateInQuote) { buf.append(c); } else { // Newline is not quoted. Move the buffer to an UNKNOWN token and add a NEWLINE token. if(buf.length() > 0) { tokenList.add(new Token(TType.UNKNOWN, buf.toString(), target)); buf = new StringBuilder(); target = new Target(lineNum, file, column); } tokenList.add(new Token(TType.NEWLINE, "\n", target)); } continue; } case '\\': { // Handle escaped characters in quotes or a single "\" seperator token otherwise. // Handle backslash character outside of quotes. if(!stateInQuote) { tokenList.add(new Token(TType.SEPERATOR, "\\", target)); break; } // Handle an escape sign in a quote. switch(c2) { case '\\': case '\'': case '"': buf.append(c2); break; case 'n': buf.append('\n'); break; case 'r': buf.append('\r'); break; case 't': buf.append('\t'); break; case '0': buf.append('\0'); break; case 'f': buf.append('\f'); break; // Form feed. case 'v': buf.append('\u000B'); break; // Vertical TAB. case 'a': buf.append('\u0007'); break; // Alarm. case 'b': buf.append('\u0008'); break; // Backspace. case 'u': { // Unicode (4 characters). // Grab the next 4 characters, and check to see if they are numbers. if(i + 5 >= script.length()) { throw new ConfigCompileException("Unrecognized unicode escape sequence", target); } String unicode = script.substring(i + 2, i + 6); int unicodeNum; try { unicodeNum = Integer.parseInt(unicode, 16); } catch (NumberFormatException e) { throw new ConfigCompileException( "Unrecognized unicode escape sequence: \\u" + unicode, target); } buf.append(Character.toChars(unicodeNum)); i += 4; break; } case 'U': { // Unicode (8 characters). // Grab the next 8 characters and check to see if they are numbers. if(i + 9 >= script.length()) { throw new ConfigCompileException("Unrecognized unicode escape sequence", target); } String unicode = script.substring(i + 2, i + 10); int unicodeNum; try { unicodeNum = Integer.parseInt(unicode, 16); } catch (NumberFormatException e) { throw new ConfigCompileException( "Unrecognized unicode escape sequence: \\u" + unicode, target); } buf.append(Character.toChars(unicodeNum)); i += 8; break; } case '@': { if(!inSmartQuote) { throw new ConfigCompileException("The escape sequence \\@ is not" + " a recognized escape sequence in a non-smart string", target); } buf.append("\\@"); break; } default: { // Since we might expand this list later, don't let them use unescaped backslashes. throw new ConfigCompileException( "The escape sequence \\" + c2 + " is not a recognized escape sequence", target); } } i++; continue; } default: { // At this point, only non-comment and non-escaped characters that are not part of a // quote start/end are left. // Disallow Non-Breaking Space Characters. 
if(!stateInQuote && c == '\u00A0'/*nbsp*/) { throw new ConfigCompileException("NBSP character in script", target); } // Add the characters that didn't match anything to the buffer. buf.append(c); continue; } } } // End of lexing. // Handle unended file options. if(inFileOptions) { throw new ConfigCompileException("Unended file options. You started the the file options on line " + fileOptionsLineNumberStart, target); } // Handle unended string literals. if(stateInQuote) { if(inSmartQuote) { throw new ConfigCompileException("Unended string literal. You started the last double quote on line " + smartQuoteLineNumberStart, target); } else { throw new ConfigCompileException("Unended string literal. You started the last single quote on line " + quoteLineNumberStart, target); } } // Handle unended comment blocks. Since a newline is added to the end of the script, line comments are ended. if(inComment || commentIsBlock) { throw new ConfigCompileException("Unended block comment. You started the comment on line " + commentLineNumberStart, target); } // Look at the tokens and get meaning from them. Also, look for improper symbol locations // and go ahead and absorb unary +- into the token. ListIterator<Token> it = tokenList.listIterator(0); while(it.hasNext()) { Token t = it.next(); // Combine whitespace tokens into one. if(t.type == TType.WHITESPACE && it.hasNext()) { Token next; if((next = it.next()).type == TType.WHITESPACE) { t.value += next.val(); it.remove(); // Remove 'next'. } else { it.previous(); // Select 'next' <--. } it.previous(); // Select 't' <--. it.next(); // Select 't' -->. } // Convert "-" + number to -number if allowed. it.previous(); // Select 't' <--. if(it.hasPrevious() && t.type == TType.UNKNOWN) { Token prev1 = it.previous(); // Select 'prev1' <--. if(prev1.type.isPlusMinus()) { // Find the first non-whitespace token before the '-'. Token prevNonWhitespace = null; while(it.hasPrevious()) { if(it.previous().type != TType.WHITESPACE) { prevNonWhitespace = it.next(); break; } } while(it.next() != prev1) { // Skip until selection is at 'prev1 -->'. } if(prevNonWhitespace != null) { // Convert "±UNKNOWN" if the '±' is used as a sign (and not an add/subtract operation). if(!prevNonWhitespace.type.isIdentifier() // Don't convert "number/string/var ± ...". && prevNonWhitespace.type != TType.FUNC_END // Don't convert "func() ± ...". && prevNonWhitespace.type != TType.RSQUARE_BRACKET // Don't convert "] ± ..." (arrays). && !IVAR_PATTERN.matcher(t.val()).matches() // Don't convert "± @var". && !VAR_PATTERN.matcher(t.val()).matches()) { // Don't convert "± $var". // It is a negative/positive number: Absorb the sign. t.value = prev1.value + t.value; it.remove(); // Remove 'prev1'. } } } else { it.next(); // Select 'prev1' -->. } } it.next(); // Select 't' -->. // Assign a type to all UNKNOWN tokens. if(t.type == TType.UNKNOWN) { if(t.val().charAt(0) == '/' && t.val().length() > 1) { t.type = TType.COMMAND; } else if(t.val().equals("$")) { t.type = TType.FINAL_VAR; } else if(VAR_PATTERN.matcher(t.val()).matches()) { t.type = TType.VARIABLE; } else if(IVAR_PATTERN.matcher(t.val()).matches()) { t.type = TType.IVARIABLE; } else if(t.val().charAt(0) == '@') { throw new ConfigCompileException("IVariables must match the regex: " + IVAR_PATTERN, target); } else if(keywords.contains(t.val())) { t.type = TType.KEYWORD; } else if(t.val().matches("[\t ]*")) { t.type = TType.WHITESPACE; } else { t.type = TType.LIT; } } // Skip this check if we're not in pure mscript. 
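// (In pure MethodScript this enforces that a binary symbol has an operand on both sides:
// a non-unary symbol directly next to '(', ')', ',' or another symbol is rejected below.)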
if(inPureMScript) { if(it.hasNext()) { Token next = it.next(); // Select 'next' -->. it.previous(); // Select 'next' <--. it.previous(); // Select 't' <--. if(t.type.isSymbol() && !t.type.isUnary() && !next.type.isUnary()) { if(it.hasPrevious()) { Token prev1 = it.previous(); // Select 'prev1' <--. if(prev1.type.equals(TType.FUNC_START) || prev1.type.equals(TType.COMMA) || next.type.equals(TType.FUNC_END) || next.type.equals(TType.COMMA) || prev1.type.isSymbol() || next.type.isSymbol()) { throw new ConfigCompileException("Unexpected symbol (" + t.val() + ")", t.getTarget()); } it.next(); // Select 'prev1' -->. } } it.next(); // Select 't' -->. } } } // Set file options tokenList.setFileOptions(fileOptions.toString()); // Make sure that the file options are the first non-comment code in the file { boolean foundCode = false; for(Token t : tokenList) { if(t.type.isFileOption()) { if(foundCode) { throw new ConfigCompileException("File options must be the first non-comment section in the" + " code", t.target); } break; } if(!t.type.isComment() && !t.type.isWhitespace()) { foundCode = true; } } } return tokenList; } /** * This function breaks the token stream into parts, separating the aliases/MethodScript from the command triggers * * @param tokenStream * @return * @throws ConfigCompileException */ public static List<Script> preprocess(TokenStream tokenStream) throws ConfigCompileException { if(tokenStream == null || tokenStream.isEmpty()) { return new ArrayList<>(); } // Remove leading newlines. while(!tokenStream.isEmpty() && tokenStream.getFirst().type == TType.NEWLINE) { tokenStream.removeFirst(); // Remove leading newlines. } // Return an empty list if there were only newlines. if(tokenStream.isEmpty()) { return new ArrayList<>(); } // Remove whitespaces and duplicate newlines. { ListIterator<Token> it = tokenStream.listIterator(0); Token token = it.next(); outerLoop: while(true) { switch(token.type) { case WHITESPACE: { it.remove(); // Remove whitespaces. if(!it.hasNext()) { break outerLoop; } token = it.next(); continue outerLoop; } case NEWLINE: { while(it.hasNext()) { if((token = it.next()).type == TType.NEWLINE) { it.remove(); // Remove duplicate newlines. } else { continue outerLoop; } } break outerLoop; } default: { if(!it.hasNext()) { break outerLoop; } token = it.next(); continue outerLoop; } } } } // Handle multiline constructs. // Take out newlines between the '= >>>' and '<<<' tokens (also removing the '>>>' and '<<<' tokens). // Also remove comments and also remove newlines that are behind a '\'. boolean insideMultiline = false; ListIterator<Token> it = tokenStream.listIterator(0); Token token = null; while(it.hasNext()) { token = it.next(); switch(token.type) { case ALIAS_END: { // "=". if(it.hasNext()) { if(it.next().type == TType.MULTILINE_START) { // "= >>>". insideMultiline = true; it.remove(); // Remove multiline start (>>>). it.previous(); // Select 'token' <---. it.next(); // Select 'token' -->. } else { it.previous(); // Select 'next' <---. } } continue; } case MULTILINE_END: { // "<<<". // Handle multiline end token (<<<) without start. if(!insideMultiline) { throw new ConfigCompileException( "Found multiline end symbol, and no multiline start found", token.target); } insideMultiline = false; it.remove(); // Remove multiline end (<<<). continue; } case MULTILINE_START: { // ">>>". // Handle multiline start token (>>>) while already in multiline. 
if(insideMultiline) { throw new ConfigCompileException("Did not expect a multiline start symbol here," + " are you missing a multiline end symbol above this line?", token.target); } // Handle multiline start token (>>>) without alias end (=) in front. it.previous(); // Select 'token' <--. if(!it.hasPrevious() || it.previous().type != TType.ALIAS_END) { throw new ConfigCompileException( "Multiline symbol must follow the alias_end (=) symbol", token.target); } it.next(); // Select 'prev' -->. it.next(); // Select 'token' -->. continue; } case NEWLINE: { // "\n". // Skip newlines that are inside a multiline construct. if(insideMultiline) { it.remove(); // Remove newline. } continue; } // Remove comments. case COMMENT: case SMART_COMMENT: { it.remove(); // Remove comment. continue; } default: { // Remove newlines that are behind a '\'. if(token.type != TType.STRING && token.val().equals("\\") && it.hasNext()) { if(it.next().type == TType.NEWLINE) { it.remove(); // Remove newline. it.previous(); // Select 'token' <--. it.next(); // Select 'token' -->. } else { it.previous(); // Select 'next' <--. } } } } } assert token != null; // Handle missing multiline end token. if(insideMultiline) { throw new ConfigCompileException("Expecting a multiline end symbol, but your last multiline alias appears to be missing one.", token.target); } // Now that we have all lines minified, we should be able to split on newlines // and easily find the left and right sides. List<Token> left = new ArrayList<>(); List<Token> right = new ArrayList<>(); List<Script> scripts = new ArrayList<>(); tokenLoop: for(it = tokenStream.listIterator(0); it.hasNext();) { Token t = it.next(); // Add all tokens until ALIAS_END (=) or end of stream. while(t.type != TType.ALIAS_END) { if(!it.hasNext()) { break tokenLoop; // End of stream. } left.add(t); t = it.next(); } // Add all tokens until NEWLINE (\n). while(t.type != TType.NEWLINE) { assert it.hasNext(); // All files end with a newline, so end of stream should be impossible here. right.add(t); t = it.next(); } // Create a new script for the obtained left and right if end of stream has not been reached. if(t.type == TType.NEWLINE) { // Check for spurious symbols, which indicate an issue with the script, but ignore any whitespace. for(int j = left.size() - 1; j >= 0; j--) { if(left.get(j).type == TType.NEWLINE) { if(j > 0 && left.get(j - 1).type != TType.WHITESPACE) { throw new ConfigCompileException( "Unexpected token: " + left.get(j - 1).val(), left.get(j - 1).getTarget()); } } } // Create a new script from the command descriptor (left) and code (right) and add it to the list. Script s = new Script(left, right, null, tokenStream.getFileOptions()); scripts.add(s); // Create new left and right array for the next script. left = new ArrayList<>(); right = new ArrayList<>(); } } // Return the scripts. return scripts; } /** * Compiles the token stream into a valid ParseTree. This also includes optimization and reduction. * * @param stream The token stream, as generated by {@link #lex(String, File, boolean) lex} * @return A fully compiled, optimized, and reduced parse tree. If {@code stream} is null or empty, null is * returned. * @throws ConfigCompileException If the script contains syntax errors. Additionally, during optimization, certain * methods may cause compile errors. Any function that can optimize static occurrences and throws a * {@link ConfigRuntimeException} will have that exception converted to a ConfigCompileException. 
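* @throws ConfigCompileGroupException If more than one compile error is found, the individual errors are
* collected and thrown together as a single group.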
*/ public static ParseTree compile(TokenStream stream) throws ConfigCompileException, ConfigCompileGroupException { Set<ConfigCompileException> compilerErrors = new HashSet<>(); if(stream == null || stream.isEmpty()) { return null; } Target unknown; try { //Instead of using Target.UNKNOWN, we can at least set the file. unknown = new Target(0, stream.get(0).target.file(), 0); } catch (Exception e) { unknown = Target.UNKNOWN; } // Remove all newlines and whitespaces. ListIterator<Token> it = stream.listIterator(0); while(it.hasNext()) { if(it.next().type.isWhitespace()) { it.remove(); } } // Get the file options. final FileOptions fileOptions = stream.getFileOptions(); ParseTree tree = new ParseTree(fileOptions); tree.setData(CNull.NULL); Stack<ParseTree> parents = new Stack<>(); /** * constructCount is used to determine if we need to use autoconcat when reaching a FUNC_END. The previous * constructs, if the count is greater than 1, will be moved down into an autoconcat. */ Stack<AtomicInteger> constructCount = new Stack<>(); constructCount.push(new AtomicInteger(0)); parents.push(tree); tree.addChild(new ParseTree(new CFunction("__autoconcat__", unknown), fileOptions)); parents.push(tree.getChildAt(0)); tree = tree.getChildAt(0); constructCount.push(new AtomicInteger(0)); /** * The array stack is used to keep track of the number of square braces in use. */ Stack<AtomicInteger> arrayStack = new Stack<>(); arrayStack.add(new AtomicInteger(-1)); Stack<AtomicInteger> minusArrayStack = new Stack<>(); Stack<AtomicInteger> minusFuncStack = new Stack<>(); int parens = 0; Token t = null; int braceCount = 0; // Create a Token array to iterate over, rather than using the LinkedList's O(n) get() method. Token[] tokenArray = stream.toArray(new Token[stream.size()]); for(int i = 0; i < tokenArray.length; i++) { t = tokenArray[i]; Token prev1 = i - 1 >= 0 ? tokenArray[i - 1] : new Token(TType.UNKNOWN, "", t.target); Token next1 = i + 1 < stream.size() ? tokenArray[i + 1] : new Token(TType.UNKNOWN, "", t.target); Token next2 = i + 2 < stream.size() ? tokenArray[i + 2] : new Token(TType.UNKNOWN, "", t.target); Token next3 = i + 3 < stream.size() ? 
tokenArray[i + 3] : new Token(TType.UNKNOWN, "", t.target); // Brace handling if(t.type == TType.LCURLY_BRACKET) { ParseTree b = new ParseTree(new CFunction("__cbrace__", t.getTarget()), fileOptions); tree.addChild(b); tree = b; parents.push(b); braceCount++; constructCount.push(new AtomicInteger(0)); continue; } if(t.type == TType.RCURLY_BRACKET) { braceCount--; if(constructCount.peek().get() > 1) { //We need to autoconcat some stuff int stacks = constructCount.peek().get(); int replaceAt = tree.getChildren().size() - stacks; ParseTree c = new ParseTree(new CFunction("__autoconcat__", tree.getTarget()), fileOptions); List<ParseTree> subChildren = new ArrayList<>(); for(int b = replaceAt; b < tree.numberOfChildren(); b++) { subChildren.add(tree.getChildAt(b)); } c.setChildren(subChildren); if(replaceAt > 0) { List<ParseTree> firstChildren = new ArrayList<>(); for(int d = 0; d < replaceAt; d++) { firstChildren.add(tree.getChildAt(d)); } tree.setChildren(firstChildren); } else { tree.removeChildren(); } tree.addChild(c); } parents.pop(); tree = parents.peek(); constructCount.pop(); try { constructCount.peek().incrementAndGet(); } catch (EmptyStackException e) { throw new ConfigCompileException("Unexpected end curly brace", t.target); } continue; } //Associative array/label handling if(t.type == TType.LABEL && tree.getChildren().size() > 0) { //If it's not an atomic identifier it's an error. if(!prev1.type.isAtomicLit() && prev1.type != TType.IVARIABLE && prev1.type != TType.KEYWORD) { ConfigCompileException error = new ConfigCompileException("Invalid label specified", t.getTarget()); if(prev1.type == TType.FUNC_END) { // This is a fairly common mistake, so we have special handling for this, // because otherwise we would get a "Mismatched parenthesis" warning (which doesn't make sense), // and potentially lots of other invalid errors down the line, so we go ahead // and stop compilation at this point. throw error; } compilerErrors.add(error); } // Wrap previous construct in a CLabel ParseTree cc = tree.getChildren().get(tree.getChildren().size() - 1); tree.removeChildAt(tree.getChildren().size() - 1); tree.addChild(new ParseTree(new CLabel(cc.getData()), fileOptions)); continue; } //Array notation handling if(t.type.equals(TType.LSQUARE_BRACKET)) { arrayStack.push(new AtomicInteger(tree.getChildren().size() - 1)); continue; } else if(t.type.equals(TType.RSQUARE_BRACKET)) { boolean emptyArray = false; if(prev1.type.equals(TType.LSQUARE_BRACKET)) { emptyArray = true; } if(arrayStack.size() == 1) { throw new ConfigCompileException("Mismatched square bracket", t.target); } //array is the location of the array int array = arrayStack.pop().get(); //index is the location of the first node with the index int index = array + 1; if(!tree.hasChildren() || array == -1) { throw new ConfigCompileException("Brackets are illegal here", t.target); } ParseTree myArray = tree.getChildAt(array); ParseTree myIndex; if(!emptyArray) { myIndex = new ParseTree(new CFunction("__autoconcat__", myArray.getTarget()), fileOptions); for(int j = index; j < tree.numberOfChildren(); j++) { myIndex.addChild(tree.getChildAt(j)); } } else { myIndex = new ParseTree(new CSlice("0..-1", t.target), fileOptions); } tree.setChildren(tree.getChildren().subList(0, array)); ParseTree arrayGet = new ParseTree(new CFunction("array_get", t.target), fileOptions); arrayGet.addChild(myArray); arrayGet.addChild(myIndex); // Check if the @var[...] had a negating "-" in front. If so, add a neg(). 
if(!minusArrayStack.isEmpty() && arrayStack.size() + 1 == minusArrayStack.peek().get()) { if(!next1.type.equals(TType.LSQUARE_BRACKET)) { // Wait if there are more array_get's comming. ParseTree negTree = new ParseTree(new CFunction("neg", unknown), fileOptions); negTree.addChild(arrayGet); tree.addChild(negTree); minusArrayStack.pop(); } else { // Negate the next array_get instead, so just add this one to the tree. tree.addChild(arrayGet); } } else { tree.addChild(arrayGet); } constructCount.peek().set(constructCount.peek().get() - myIndex.numberOfChildren()); continue; } //Smart strings if(t.type == TType.SMART_STRING) { if(t.val().contains("@")) { ParseTree function = new ParseTree(fileOptions); function.setData(new CFunction(new Compiler.smart_string().getName(), t.target)); ParseTree string = new ParseTree(fileOptions); string.setData(new CString(t.value, t.target)); function.addChild(string); tree.addChild(function); } else { tree.addChild(new ParseTree(new CString(t.val(), t.target), fileOptions)); } constructCount.peek().incrementAndGet(); continue; } if(t.type == TType.DEREFERENCE) { //Currently unimplemented, but going ahead and making it strict compilerErrors.add(new ConfigCompileException("The '" + t.val() + "' symbol is not currently allowed in raw strings. You must quote all" + " symbols.", t.target)); } if(t.type.equals(TType.FUNC_NAME)) { CFunction func = new CFunction(t.val(), t.target); ParseTree f = new ParseTree(func, fileOptions); tree.addChild(f); constructCount.push(new AtomicInteger(0)); tree = f; parents.push(f); } else if(t.type.equals(TType.FUNC_START)) { if(!prev1.type.equals(TType.FUNC_NAME)) { throw new ConfigCompileException("Unexpected parenthesis", t.target); } parens++; } else if(t.type.equals(TType.FUNC_END)) { if(parens <= 0) { throw new ConfigCompileException("Unexpected parenthesis", t.target); } parens--; parents.pop(); // Pop function. if(constructCount.peek().get() > 1) { //We need to autoconcat some stuff int stacks = constructCount.peek().get(); int replaceAt = tree.getChildren().size() - stacks; ParseTree c = new ParseTree(new CFunction("__autoconcat__", tree.getTarget()), fileOptions); List<ParseTree> subChildren = new ArrayList<>(); for(int b = replaceAt; b < tree.numberOfChildren(); b++) { subChildren.add(tree.getChildAt(b)); } c.setChildren(subChildren); if(replaceAt > 0) { List<ParseTree> firstChildren = new ArrayList<>(); for(int d = 0; d < replaceAt; d++) { firstChildren.add(tree.getChildAt(d)); } tree.setChildren(firstChildren); } else { tree.removeChildren(); } tree.addChild(c); } constructCount.pop(); try { constructCount.peek().incrementAndGet(); } catch (EmptyStackException e) { throw new ConfigCompileException("Unexpected end parenthesis", t.target); } try { tree = parents.peek(); } catch (EmptyStackException e) { throw new ConfigCompileException("Unexpected end parenthesis", t.target); } // Handle "-func(args)" and "-func(args)[index]". if(!minusFuncStack.isEmpty() && minusFuncStack.peek().get() == parens + 1) { if(next1.type.equals(TType.LSQUARE_BRACKET)) { // Move the negation to the array_get which contains this function. minusArrayStack.push(new AtomicInteger(arrayStack.size() + 1)); // +1 because the bracket isn't counted yet. } else { // Negate this function. 
ParseTree negTree = new ParseTree(new CFunction("neg", unknown), fileOptions); negTree.addChild(tree.getChildAt(tree.numberOfChildren() - 1)); tree.removeChildAt(tree.numberOfChildren() - 1); tree.addChildAt(tree.numberOfChildren(), negTree); } minusFuncStack.pop(); } } else if(t.type.equals(TType.COMMA)) { if(constructCount.peek().get() > 1) { int stacks = constructCount.peek().get(); int replaceAt = tree.getChildren().size() - stacks; ParseTree c = new ParseTree(new CFunction("__autoconcat__", unknown), fileOptions); List<ParseTree> subChildren = new ArrayList<>(); for(int b = replaceAt; b < tree.numberOfChildren(); b++) { subChildren.add(tree.getChildAt(b)); } c.setChildren(subChildren); if(replaceAt > 0) { List<ParseTree> firstChildren = new ArrayList<>(); for(int d = 0; d < replaceAt; d++) { firstChildren.add(tree.getChildAt(d)); } tree.setChildren(firstChildren); } else { tree.removeChildren(); } tree.addChild(c); } constructCount.peek().set(0); continue; } if(t.type == TType.SLICE) { //We got here because the previous token isn't being ignored, because it's //actually a control character, instead of whitespace, but this is a //"empty first" slice notation. Compare this to the code below. try { CSlice slice; String value = next1.val(); if(next1.type == TType.MINUS || next1.type == TType.PLUS) { value = next1.val() + next2.val(); i++; } slice = new CSlice(".." + value, t.getTarget()); i++; tree.addChild(new ParseTree(slice, fileOptions)); constructCount.peek().incrementAndGet(); continue; } catch (ConfigRuntimeException ex) { //CSlice can throw CREs, but at this stage, we have to //turn them into a CCE. throw new ConfigCompileException(ex); } } if(next1.type.equals(TType.SLICE)) { //Slice notation handling try { CSlice slice; if(t.type.isSeparator() || (t.type.isWhitespace() && prev1.type.isSeparator()) || t.type.isKeyword()) { //empty first String value = next2.val(); i++; if(next2.type == TType.MINUS || next2.type == TType.PLUS) { value = next2.val() + next3.val(); i++; } slice = new CSlice(".." + value, next1.getTarget()); if(t.type.isKeyword()) { tree.addChild(new ParseTree(new CKeyword(t.val(), t.getTarget()), fileOptions)); constructCount.peek().incrementAndGet(); } } else if(next2.type.isSeparator() || next2.type.isKeyword()) { //empty last String modifier = ""; if(prev1.type == TType.MINUS || prev1.type == TType.PLUS) { //The negative would have already been inserted into the tree modifier = prev1.val(); tree.removeChildAt(tree.getChildren().size() - 1); } slice = new CSlice(modifier + t.value + "..", t.target); } else { //both are provided String modifier1 = ""; if(prev1.type == TType.MINUS || prev1.type == TType.PLUS) { //It's a negative, incorporate that here, and remove the //minus from the tree modifier1 = prev1.val(); tree.removeChildAt(tree.getChildren().size() - 1); } Token first = t; if(first.type.isWhitespace()) { first = prev1; } Token second = next2; i++; String modifier2 = ""; if(next2.type == TType.MINUS || next2.type == TType.PLUS) { modifier2 = next2.val(); second = next3; i++; } slice = new CSlice(modifier1 + first.value + ".." + modifier2 + second.value, t.target); } i++; tree.addChild(new ParseTree(slice, fileOptions)); constructCount.peek().incrementAndGet(); continue; } catch (ConfigRuntimeException ex) { //CSlice can throw CREs, but at this stage, we have to //turn them into a CCE. 
throw new ConfigCompileException(ex); } } else if(t.type == TType.LIT) { Construct c = Static.resolveConstruct(t.val(), t.target); if(c instanceof CString && fileOptions.isStrict()) { compilerErrors.add(new ConfigCompileException("Bare strings are not allowed in strict mode", t.target)); } else if((c instanceof CInt || c instanceof CDecimal) && next1.type == TType.DOT && next2.type == TType.LIT) { // make CDouble/CDecimal here because otherwise Long.parseLong() will remove // minus zero before decimals and leading zeroes after decimals try { if(t.value.startsWith("0m")) { // CDecimal String neg = ""; if(prev1.value.equals("-")) { neg = "-"; } c = new CDecimal(neg + t.value.substring(2) + '.' + next2.value, t.target); } else { // CDouble c = new CDouble(Double.parseDouble(t.val() + '.' + next2.val()), t.target); } i += 2; } catch (NumberFormatException e) { // Not a double } } tree.addChild(new ParseTree(c, fileOptions)); constructCount.peek().incrementAndGet(); } else if(t.type.equals(TType.STRING) || t.type.equals(TType.COMMAND)) { tree.addChild(new ParseTree(new CString(t.val(), t.target), fileOptions)); constructCount.peek().incrementAndGet(); } else if(t.type.equals(TType.IDENTIFIER)) { tree.addChild(new ParseTree(new CPreIdentifier(t.val(), t.target), fileOptions)); constructCount.peek().incrementAndGet(); } else if(t.type.isKeyword()) { tree.addChild(new ParseTree(new CKeyword(t.val(), t.getTarget()), fileOptions)); constructCount.peek().incrementAndGet(); } else if(t.type.equals(TType.IVARIABLE)) { tree.addChild(new ParseTree(new IVariable(t.val(), t.target), fileOptions)); constructCount.peek().incrementAndGet(); } else if(t.type.equals(TType.UNKNOWN)) { tree.addChild(new ParseTree(Static.resolveConstruct(t.val(), t.target), fileOptions)); constructCount.peek().incrementAndGet(); } else if(t.type.isSymbol()) { //Logic and math symbols // Attempt to find "-@var" and change it to "neg(@var)" if it's not @a - @b. Else just add the symbol. // Also handles "-function()" and "-@var[index]". if(t.type.equals(TType.MINUS) && !prev1.type.isAtomicLit() && !prev1.type.equals(TType.IVARIABLE) && !prev1.type.equals(TType.VARIABLE) && !prev1.type.equals(TType.RCURLY_BRACKET) && !prev1.type.equals(TType.RSQUARE_BRACKET) && !prev1.type.equals(TType.FUNC_END) && (next1.type.equals(TType.IVARIABLE) || next1.type.equals(TType.VARIABLE) || next1.type.equals(TType.FUNC_NAME))) { // Check if we are negating a value from an array, function or variable. if(next2.type.equals(TType.LSQUARE_BRACKET)) { minusArrayStack.push(new AtomicInteger(arrayStack.size() + 1)); // +1 because the bracket isn't counted yet. } else if(next1.type.equals(TType.FUNC_NAME)) { minusFuncStack.push(new AtomicInteger(parens + 1)); // +1 because the function isn't counted yet. } else { ParseTree negTree = new ParseTree(new CFunction("neg", unknown), fileOptions); negTree.addChild(new ParseTree(new IVariable(next1.value, next1.target), fileOptions)); tree.addChild(negTree); constructCount.peek().incrementAndGet(); i++; // Skip the next variable as we've just handled it. } } else { tree.addChild(new ParseTree(new CSymbol(t.val(), t.type, t.target), fileOptions)); constructCount.peek().incrementAndGet(); } } else if(t.type == TType.DOT) { // Check for doubles that start with a decimal, otherwise concat Construct c = null; if(next1.type == TType.LIT && prev1.type != TType.STRING && prev1.type != TType.SMART_STRING) { try { c = new CDouble(Double.parseDouble('.' 
+ next1.val()), t.target); i++; } catch (NumberFormatException e) { // Not a double } } if(c == null) { c = new CSymbol(".", TType.CONCAT, t.target); } tree.addChild(new ParseTree(c, fileOptions)); constructCount.peek().incrementAndGet(); } else if(t.type.equals(TType.VARIABLE) || t.type.equals(TType.FINAL_VAR)) { tree.addChild(new ParseTree(new Variable(t.val(), null, false, t.type.equals(TType.FINAL_VAR), t.target), fileOptions)); constructCount.peek().incrementAndGet(); //right_vars.add(new Variable(t.val(), null, t.line_num)); } } assert t != null; // Handle mismatching square brackets "[]". assert arrayStack.size() != 0 : "The last element of arrayStack should be present, but it was popped."; if(arrayStack.size() != 1) { // Some starting square bracket '[' was not closed at the end of the script. // Find the last '[' that was not closed and use that as target instead of the last line of the script. Target target = traceMismatchedOpenToken(stream, TType.LSQUARE_BRACKET, TType.RSQUARE_BRACKET); assert target != null : "Mismatched bracket was detected, but target-finding code could not find it."; if(target == null) { target = t.target; } // Throw a CRE. throw new ConfigCompileException("Mismatched square brackets", target); } // Handle mismatching parentheses "()". if(parens != 0) { // Some starting parentheses '(' was not closed at the end of the script. // Find the last '(' that was not closed and use that as target instead of the last line of the script. Target target = traceMismatchedOpenToken(stream, TType.FUNC_START, TType.FUNC_END); assert target != null : "Mismatched parentheses was detected, but target-finding code could not find it."; if(target == null) { target = t.target; } // Throw a CRE. throw new ConfigCompileException("Mismatched parentheses", target); } // Handle mismatching curly braces "{}". if(braceCount != 0) { // Some starting curly brace '{' was not closed at the end of the script. // Find the last '{' that was not closed and use that as target instead of the last line of the script. Target target = traceMismatchedOpenToken(stream, TType.LCURLY_BRACKET, TType.RCURLY_BRACKET); assert target != null : "Mismatched curly brace was detected, but target-finding code could not find it."; if(target == null) { target = t.target; } // Throw a CRE. throw new ConfigCompileException("Mismatched curly braces", target); } Stack<List<Procedure>> procs = new Stack<>(); procs.add(new ArrayList<Procedure>()); processKeywords(tree); optimizeAutoconcats(tree, compilerErrors); optimize(tree, procs, compilerErrors); link(tree, compilerErrors); checkLabels(tree, compilerErrors); checkBreaks(tree, compilerErrors); if(!compilerErrors.isEmpty()) { if(compilerErrors.size() == 1) { // Just throw the one CCE for(ConfigCompileException e : compilerErrors) { throw e; } } else { throw new ConfigCompileGroupException(compilerErrors); } } parents.pop(); tree = parents.pop(); return tree; } /** * Trace target of mismatching open tokens such as '(' in '()' or '{' in '{}'. This should be used when it is * known that there are more start than close tokens, but no target is known for the extra start token. * @param stream - The token stream to scan. * @param openType - The open type, which would be {@link TType#FUNC_START (} for a parentheses check. * @param closeType - The close type, which would be {@link TType#FUNC_END )} for a parentheses check. * @return The target of the last occurrence of the opening type that did not have a matching closing type. * Returns null of no target was found. 
*/ private static Target traceMismatchedOpenToken(TokenStream stream, TType openType, TType closeType) { // Some starting parentheses '(' was not closed at the end of the script. // Find the last '(' that was not closed and use that as target instead of the last line of the script. Iterator<Token> iterator = stream.descendingIterator(); int closingCount = 0; while(iterator.hasNext()) { Token token = iterator.next(); if(token.type == closeType) { closingCount++; } else if(token.type == openType) { if(closingCount <= 0) { return token.target; } closingCount--; } } return null; } /** * Recurses down the tree and ensures that breaks don't bubble up past procedures or the root code tree. * * @param tree * @throws ConfigCompileException */ private static void checkBreaks(ParseTree tree, Set<ConfigCompileException> compilerExceptions) { checkBreaks0(tree, 0, null, compilerExceptions); } private static void checkBreaks0(ParseTree tree, long currentLoops, String lastUnbreakable, Set<ConfigCompileException> compilerErrors) { if(!(tree.getData() instanceof CFunction)) { //Don't care about these return; } if(tree.getData().val().startsWith("_")) { //It's a proc. We need to recurse, but not check this "function" for(ParseTree child : tree.getChildren()) { checkBreaks0(child, currentLoops, lastUnbreakable, compilerErrors); } return; } Function func; try { func = ((CFunction) tree.getData()).getFunction(); } catch (ConfigCompileException ex) { compilerErrors.add(ex); return; } if(func.getClass().getAnnotation(nolinking.class) != null) { // Don't link here return; } // We have special handling for procs and closures, and of course break and the loops. // If any of these are here, we kick into special handling mode. Otherwise, we recurse. if(func instanceof DataHandling._break) { // First grab the counter in the break function. If the break function doesn't // have any children, then 1 is implied. break() requires the argument to be // a CInt, so if it weren't, there would already have been a compile error, so // we can assume it will be a CInt. long breakCounter = 1; if(tree.getChildren().size() == 1) { breakCounter = ((CInt) tree.getChildAt(0).getData()).getInt(); } if(breakCounter > currentLoops) { // Throw an exception, as this would break above a loop. Different error messages // are applied to different cases if(currentLoops == 0) { compilerErrors.add(new ConfigCompileException("The break() function can only break out of loops" + (lastUnbreakable == null ? "." : ", but an attempt to break out of a " + lastUnbreakable + " was detected."), tree.getTarget())); } else { compilerErrors.add(new ConfigCompileException("Too many breaks" + " detected. Check your loop nesting, and set the break count to an appropriate value.", tree.getTarget())); } } return; } if(func.getClass().getAnnotation(unbreakable.class) != null) { // Parse the children like normal, but reset the counter to 0. for(ParseTree child : tree.getChildren()) { checkBreaks0(child, 0, func.getName(), compilerErrors); } return; } if(func.getClass().getAnnotation(breakable.class) != null) { // Don't break yet, still recurse, but up our current loops counter. currentLoops++; } for(ParseTree child : tree.getChildren()) { checkBreaks0(child, currentLoops, lastUnbreakable, compilerErrors); } } /** * Optimizing __autoconcat__ out should happen early, and should happen regardless of whether or not optimizations * are on or off. 
So this is broken off into a separate optimization procedure, so that the intricacies of the * normal optimizations don't apply to __autoconcat__. * * @param root * @param compilerExceptions */ private static void optimizeAutoconcats(ParseTree root, Set<ConfigCompileException> compilerExceptions) { for(ParseTree child : root.getChildren()) { if(child.hasChildren()) { optimizeAutoconcats(child, compilerExceptions); } } if(root.getData() instanceof CFunction && root.getData().val().equals(__autoconcat__)) { try { ParseTree ret = ((Compiler.__autoconcat__) ((CFunction) root.getData()).getFunction()).optimizeDynamic(root.getTarget(), root.getChildren(), root.getFileOptions()); root.setData(ret.getData()); root.setChildren(ret.getChildren()); } catch (ConfigCompileException ex) { compilerExceptions.add(ex); } } } /** * Recurses down the tree and ensures that there are no dynamic labels. This has to finish completely after * optimization, because the optimizer has no good hook to know when optimization for a unit is fully completed, * until ALL units are fully complete, so this happens separately after optimization, but as apart of the normal * compile process. * * @param tree * @throws ConfigCompileException */ private static void checkLabels(ParseTree tree, Set<ConfigCompileException> compilerErrors) throws ConfigCompileException { // for(ParseTree t : tree.getChildren()){ // if(t.getData() instanceof CLabel){ // if(((CLabel)t.getData()).cVal() instanceof IVariable){ // throw new ConfigCompileException("Variables may not be used as labels", t.getTarget()); // } // } // checkLabels(t); // } } /** * Recurses down the tree and * <ul><li>Links functions</li> * <li>Checks function arguments</li></ul> * This is a separate process from optimization, because optimization ignores any missing functions. * * @param tree */ private static void link(ParseTree tree, Set<ConfigCompileException> compilerErrors) { FunctionBase treeFunction = null; try { treeFunction = FunctionList.getFunction(tree.getData()); if(treeFunction.getClass().getAnnotation(nolinking.class) != null) { //Don't link children of a nolinking function. return; } } catch (ConfigCompileException ex) { //This can happen if the treeFunction isn't a function, is a proc, etc, //but we don't care, we just want to continue. } // Check the argument count, and do any custom linking the function may have if(treeFunction != null) { Integer[] numArgs = treeFunction.numArgs(); if(!Arrays.asList(numArgs).contains(Integer.MAX_VALUE) && !Arrays.asList(numArgs).contains(tree.getChildren().size())) { compilerErrors.add(new ConfigCompileException("Incorrect number of arguments passed to " + tree.getData().val(), tree.getData().getTarget())); } if(treeFunction instanceof Optimizable) { Optimizable op = (Optimizable) treeFunction; if(op.optimizationOptions().contains(OptimizationOption.CUSTOM_LINK)) { try { op.link(tree.getData().getTarget(), tree.getChildren()); } catch (ConfigCompileException ex) { compilerErrors.add(ex); } } } } // Walk the children for(ParseTree child : tree.getChildren()) { if(child.getData() instanceof CFunction) { if(child.getData().val().charAt(0) != '_' || child.getData().val().charAt(1) == '_') { // This will throw an exception if the function doesn't exist. try { FunctionList.getFunction(child.getData()); } catch (ConfigCompileException ex) { compilerErrors.add(ex); } } link(child, compilerErrors); } } } @SuppressWarnings("checkstyle:constantname") // Variable is more clear when named after the function it represents. 
private static final String __autoconcat__ = new Compiler.__autoconcat__().getName(); /** * Recurses down into the tree, attempting to optimize where possible. A few things have strong coupling, for * information on these items, see the documentation included in the source. * * @param tree * @return */ private static void optimize(ParseTree tree, Stack<List<Procedure>> procs, Set<ConfigCompileException> compilerErrors) { if(tree.isOptimized()) { return; //Don't need to re-run this } // if(tree.getData() instanceof CIdentifier) { // optimize(((CIdentifier) tree.getData()).contained(), procs); // return; // } if(!(tree.getData() instanceof CFunction)) { //There's no way to optimize something that's not a function return; } //If it is a proc definition, we need to go ahead and see if we can add it to the const proc stack if(tree.getData().val().equals("proc")) { procs.push(new ArrayList<Procedure>()); } CFunction cFunction = (CFunction) tree.getData(); Function func; try { func = (Function) FunctionList.getFunction(cFunction); } catch (ConfigCompileException e) { func = null; } if(func != null) { if(func.getClass().getAnnotation(nolinking.class) != null) { //It's an unlinking function, so we need to stop at this point return; } } if(cFunction instanceof CIdentifier) { //Add the child to the identifier ParseTree c = ((CIdentifier) cFunction).contained(); tree.addChild(c); c.getData().setWasIdentifier(true); } List<ParseTree> children = tree.getChildren(); if(func instanceof Optimizable && ((Optimizable) func).optimizationOptions().contains(OptimizationOption.PRIORITY_OPTIMIZATION)) { // This is a priority optimization function, meaning it needs to be optimized before its children are. // This is required when optimization of the children could cause different internal behavior, for instance // if this function is expecting the precense of soem code element, but the child gets optimized out, this // would cause an error, even though the user did in fact provide code in that section. try { ((Optimizable) func).optimizeDynamic(tree.getTarget(), children, tree.getFileOptions()); } catch (ConfigCompileException ex) { // If an error occurs, we will skip the rest of this element compilerErrors.add(ex); return; } catch (ConfigRuntimeException ex) { compilerErrors.add(new ConfigCompileException(ex)); return; } } //Loop through the children, and if any of them are functions that are terminal, truncate. //To explain this further, consider the following: //For the code: concat(die(), msg('')), this diagram shows the abstract syntax tree: // (concat) // / \ // / \ // (die) (msg) //By looking at the code, we can tell that msg() will never be called, because die() will run first, //and since it is a "terminal" function, any code after it will NEVER run. However, consider a more complex condition: // if(@input){ die() msg('1') } else { msg('2') msg('3') } // if(@input) // [true]/ \[false] // / \ // (sconcat) (sconcat) // / \ / \ // / \ / \ // (die) (msg[1])(msg[2]) (msg[3]) //In this case, only msg('1') is guaranteed not to run, msg('2') and msg('3') will still run in some cases. 
//So, we can optimize out msg('1') in this case, which would cause the tree to become much simpler, therefore a worthwile optimization: // if(@input) // [true]/ \[false] // / \ // (die) (sconcat) // / \ // / \ // (msg[2]) (msg[3]) //We do have to be careful though, because of functions like if, which actually work like this: //if(@var){ die() } else { msg('') } // (if) // / | \ // / | \ // @var (die) (msg) //We can't git rid of the msg() here, because it is actually in another branch. //For the time being, we will simply say that if a function uses execs, it //is a branch (branches always use execs, though using execs doesn't strictly //mean you are a branch type function). for(int i = 0; i < children.size(); i++) { ParseTree t = children.get(i); if(t.getData() instanceof CFunction) { if(t.getData().val().startsWith("_") || (func != null && func.useSpecialExec())) { continue; } Function f; try { f = (Function) FunctionList.getFunction(t.getData()); } catch (ConfigCompileException ex) { compilerErrors.add(ex); return; } Set<OptimizationOption> options = NO_OPTIMIZATIONS; if(f instanceof Optimizable) { options = ((Optimizable) f).optimizationOptions(); } if(options.contains(OptimizationOption.TERMINAL)) { if(children.size() > i + 1) { //First, a compiler warning CHLog.GetLogger().Log(CHLog.Tags.COMPILER, LogLevel.WARNING, "Unreachable code. Consider removing this code.", children.get(i + 1).getTarget()); //Now, truncate the children for(int j = children.size() - 1; j > i; j--) { children.remove(j); } break; } } } } boolean fullyStatic = true; boolean hasIVars = false; for(ParseTree node : children) { if(node.getData() instanceof CFunction) { optimize(node, procs, compilerErrors); } if(node.getData().isDynamic() && !(node.getData() instanceof IVariable)) { fullyStatic = false; } if(node.getData() instanceof IVariable) { hasIVars = true; } } //In all cases, at this point, we are either unable to optimize, or we will //optimize, so set our optimized variable at this point. tree.setOptimized(true); if(func == null) { //It's a proc call. Let's see if we can optimize it Procedure p = null; loop: for(List<Procedure> proc : procs) { for(Procedure pp : proc) { if(pp.getName().equals(cFunction.val())) { p = pp; break loop; } } } if(p != null) { try { Construct c = DataHandling.proc.optimizeProcedure(p.getTarget(), p, children); if(c != null) { tree.setData(c); tree.removeChildren(); return; } //else Nope, couldn't optimize. } catch (ConfigRuntimeException ex) { //Cool. Caught a runtime error at compile time :D compilerErrors.add(new ConfigCompileException(ex)); } } //else this procedure isn't listed yet. Maybe a compiler error, maybe not, depends, //so we can't for sure say, but we do know we can't optimize this return; } if(tree.getData().val().equals("proc")) { //Check for too few arguments if(children.size() < 2) { compilerErrors.add(new ConfigCompileException("Incorrect number of arguments passed to proc", tree.getData().getTarget())); return; } //We just went out of scope, so we need to pop the layer of Procedures that //are internal to us procs.pop(); //However, as a special function, we *might* be able to get a const proc out of this //Let's see. 
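// (What follows is, in effect, a compile-time dry run: a throwaway environment and fake script are
// built so the procedure can be instantiated now and pushed onto the proc stack; later calls to it
// can then potentially be folded to constants by optimizeProcedure() above.)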
try { ParseTree root = new ParseTree(new CFunction(__autoconcat__, Target.UNKNOWN), tree.getFileOptions()); Script fakeScript = Script.GenerateScript(root, "*"); Environment env = null; try { if(Implementation.GetServerType().equals(Implementation.Type.BUKKIT)) { CommandHelperPlugin plugin = CommandHelperPlugin.self; GlobalEnv gEnv = new GlobalEnv(plugin.executionQueue, plugin.profiler, plugin.persistenceNetwork, MethodScriptFileLocations.getDefault().getConfigDirectory(), plugin.profiles, new TaskManager()); env = Environment.createEnvironment(gEnv, new CommandHelperEnvironment()); } else { env = Static.GenerateStandaloneEnvironment(false); } } catch (IOException | DataSourceException | URISyntaxException | Profiles.InvalidProfileException e) { // } Procedure myProc = DataHandling.proc.getProcedure(tree.getTarget(), env, fakeScript, children.toArray(new ParseTree[children.size()])); procs.peek().add(myProc); //Yep. So, we can move on with our lives now, and if it's used later, it could possibly be static. } catch (ConfigRuntimeException e) { //Well, they have an error in there somewhere compilerErrors.add(new ConfigCompileException(e)); } catch (NullPointerException e) { //Nope, can't optimize. return; } } //the compiler trick functions know how to deal with it specially, even if everything isn't //static, so do this first. String oldFunctionName = func.getName(); Set<OptimizationOption> options = NO_OPTIMIZATIONS; if(func instanceof Optimizable) { options = ((Optimizable) func).optimizationOptions(); } if(options.contains(OptimizationOption.OPTIMIZE_DYNAMIC)) { try { ParseTree tempNode; try { tempNode = ((Optimizable) func).optimizeDynamic(tree.getData().getTarget(), tree.getChildren(), tree.getFileOptions()); } catch (ConfigRuntimeException e) { //Turn it into a compile exception, then rethrow throw new ConfigCompileException(e); } if(tempNode == Optimizable.PULL_ME_UP) { if(tree.hasChildren()) { tempNode = tree.getChildAt(0); } else { tempNode = null; } } if(tempNode == Optimizable.REMOVE_ME) { tree.setData(new CFunction("p", Target.UNKNOWN)); tree.removeChildren(); } else if(tempNode != null) { tree.setData(tempNode.getData()); tree.setOptimized(tempNode.isOptimized()); tree.setChildren(tempNode.getChildren()); tree.getData().setWasIdentifier(tempNode.getData().wasIdentifier()); optimize(tree, procs, compilerErrors); tree.setOptimized(true); //Some functions can actually make static the arguments, for instance, by pulling up a hardcoded //array, so if they have reversed this, make note of that now if(tempNode.hasBeenMadeStatic()) { fullyStatic = true; } } //else it wasn't an optimization, but a compile check } catch (ConfigCompileException ex) { compilerErrors.add(ex); } } if(!fullyStatic) { return; } //Otherwise, everything is static, or an IVariable and we can proceed. //Note since we could still have IVariables, we have to handle those //specially from here forward if(func.preResolveVariables() && hasIVars) { //Well, this function isn't equipped to deal with IVariables. 
return; } //It could have optimized by changing the name, in that case, we //don't want to run this now if(tree.getData().getValue().equals(oldFunctionName) && (options.contains(OptimizationOption.OPTIMIZE_CONSTANT) || options.contains(OptimizationOption.CONSTANT_OFFLINE))) { Construct[] constructs = new Construct[tree.getChildren().size()]; for(int i = 0; i < tree.getChildren().size(); i++) { constructs[i] = tree.getChildAt(i).getData(); } try { try { Construct result; if(options.contains(OptimizationOption.CONSTANT_OFFLINE)) { List<Integer> numArgsList = Arrays.asList(func.numArgs()); if(!numArgsList.contains(Integer.MAX_VALUE) && !numArgsList.contains(tree.getChildren().size())) { compilerErrors.add(new ConfigCompileException("Incorrect number of arguments passed to " + tree.getData().val(), tree.getData().getTarget())); result = null; } else { result = func.exec(tree.getData().getTarget(), null, constructs); } } else { result = ((Optimizable) func).optimize(tree.getData().getTarget(), constructs); } //If the result is null, it was just a check, it can't optimize further. if(result != null) { result.setWasIdentifier(tree.getData().wasIdentifier()); tree.setData(result); tree.removeChildren(); } } catch (ConfigRuntimeException e) { //Turn this into a ConfigCompileException, then rethrow throw new ConfigCompileException(e); } } catch (ConfigCompileException ex) { compilerErrors.add(ex); } } //It doesn't know how to optimize. Oh well. } /** * Runs keyword processing on the tree. Note that this is run before optimization, and is a depth first process. * * @param tree */ private static void processKeywords(ParseTree tree) throws ConfigCompileException { // Keyword processing List<ParseTree> children = tree.getChildren(); for(int i = 0; i < children.size(); i++) { ParseTree node = children.get(i); // Keywords can be standalone, or a function can double as a keyword. So we have to check for both // conditions. processKeywords(node); if(node.getData() instanceof CKeyword || (node.getData() instanceof CLabel && ((CLabel) node.getData()).cVal() instanceof CKeyword) || (node.getData() instanceof CFunction && KeywordList.getKeywordByName(node.getData().val()) != null)) { // This looks a bit confusing, but is fairly straightforward. We want to process the child elements of all // remaining nodes, so that subchildren that need processing will be finished, and our current tree level will // be able to independently process it. We don't want to process THIS level though, just the children of this level. for(int j = i + 1; j < children.size(); j++) { processKeywords(children.get(j)); } // Now that all the children of the rest of the chain are processed, we can do the processing of this level. i = KeywordList.getKeywordByName(node.getData().val()).process(children, i); } } } /** * Shorthand for lexing, compiling, and executing a script. * * @param script The textual script to execute * @param file The file it was located in * @param inPureMScript If it is pure MScript, or aliases * @param env The execution environment * @param done The MethodScriptComplete callback (may be null) * @param s A script object (may be null) * @param vars Any $vars (may be null) * @return * @throws ConfigCompileException * @throws com.laytonsmith.core.exceptions.ConfigCompileGroupException This indicates that a group of compile errors * occurred. 
*/ public static Construct execute(String script, File file, boolean inPureMScript, Environment env, MethodScriptComplete done, Script s, List<Variable> vars) throws ConfigCompileException, ConfigCompileGroupException { return execute(compile(lex(script, file, inPureMScript)), env, done, s, vars); } /** * Executes a pre-compiled MethodScript, given the specified Script environment. Both done and script may be null, * and if so, reasonable defaults will be provided. The value sent to done will also be returned, as a Construct, so * this one function may be used synchronously also. * * @param root * @param env * @param done * @param script * @return */ public static Construct execute(ParseTree root, Environment env, MethodScriptComplete done, Script script) { return execute(root, env, done, script, null); } /** * Executes a pre-compiled MethodScript, given the specified Script environment, but also provides a method to set * the constants in the script. * * @param root * @param env * @param done * @param script * @param vars * @return */ public static Construct execute(ParseTree root, Environment env, MethodScriptComplete done, Script script, List<Variable> vars) { if(root == null) { return CVoid.VOID; } if(script == null) { script = new Script(null, null, env.getEnv(GlobalEnv.class).GetLabel(), new FileOptions(new HashMap<>())); } if(vars != null) { Map<String, Variable> varMap = new HashMap<>(); for(Variable v : vars) { varMap.put(v.getVariableName(), v); } for(Construct tempNode : root.getAllData()) { if(tempNode instanceof Variable) { Variable vv = varMap.get(((Variable) tempNode).getVariableName()); if(vv != null) { ((Variable) tempNode).setVal(vv.getDefault()); } else { //The variable is unset. I'm not quite sure what cases would cause this ((Variable) tempNode).setVal(""); } } } } StringBuilder b = new StringBuilder(); Construct returnable = null; for(ParseTree gg : root.getChildren()) { Construct retc = script.eval(gg, env); if(root.numberOfChildren() == 1) { returnable = retc; } String ret = retc instanceof CNull ? "null" : retc.val(); if(ret != null && !ret.trim().isEmpty()) { b.append(ret).append(" "); } } if(done != null) { done.done(b.toString().trim()); } if(returnable != null) { return returnable; } return Static.resolveConstruct(b.toString().trim(), Target.UNKNOWN); } public static void registerAutoIncludes(Environment env, Script s) { for(File f : Static.getAliasCore().autoIncludes) { try { MethodScriptCompiler.execute(IncludeCache.get(f, new Target(0, f, 0)), env, null, s); } catch (ProgramFlowManipulationException e) { ConfigRuntimeException.HandleUncaughtException(ConfigRuntimeException.CreateUncatchableException("Cannot break program flow in auto include files.", e.getTarget()), env); } catch (ConfigRuntimeException e) { e.setEnv(env); ConfigRuntimeException.HandleUncaughtException(e, env); } } } }
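A minimal usage sketch of the public pipeline defined above (lex, then compile, then execute). The script text, the null File argument, and the standalone environment are illustrative assumptions, not part of the original source; checked exceptions are left unhandled for brevity.

// Hedged sketch only; "msg('hello')" is an arbitrary example script.
Environment env = Static.GenerateStandaloneEnvironment(false); // same helper the optimizer uses above
TokenStream tokens = MethodScriptCompiler.lex("msg('hello')", null, true); // true = pure MethodScript
ParseTree tree = MethodScriptCompiler.compile(tokens); // throws ConfigCompileException on syntax errors
MethodScriptCompiler.execute(tree, env, null, null); // MethodScriptComplete callback and Script may be null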
Don't report a compile exception for a missing function before linking (fixes function_exists() and extension_exists() in many cases)
src/main/java/com/laytonsmith/core/MethodScriptCompiler.java
Don't report a compile exception for a missing function before linking (fixes function_exists() and extension_exists() in many cases)
<ide><path>src/main/java/com/laytonsmith/core/MethodScriptCompiler.java
<ide> try {
<ide> f = (Function) FunctionList.getFunction(t.getData());
<ide> } catch (ConfigCompileException ex) {
<del> compilerErrors.add(ex);
<del> return;
<add> continue;
<ide> }
<ide> Set<OptimizationOption> options = NO_OPTIMIZATIONS;
<ide> if(f instanceof Optimizable) {
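Applied to the terminal-function scan in optimize() above, the changed loop body reads as follows after this commit (line breaks and indentation reconstructed for readability; the surrounding for-loop is omitted):

Function f;
try {
	f = (Function) FunctionList.getFunction(t.getData());
} catch (ConfigCompileException ex) {
	// The function may come from an extension that is not loaded yet (for example code guarded by
	// function_exists() or extension_exists()), so do not report it here; link() still validates
	// function names later in the compile process.
	continue;
}
Set<OptimizationOption> options = NO_OPTIMIZATIONS;
if(f instanceof Optimizable) {
	options = ((Optimizable) f).optimizationOptions();
}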
Java
mit
c36039c06cc9592cf42ab48fe1e4846486f4527d
0
olavloite/spanner-jdbc
package nl.topicus.jdbc.statement; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.SQLFeatureNotSupportedException; import java.sql.SQLWarning; import java.sql.Statement; import nl.topicus.jdbc.CloudSpannerConnection; import com.google.cloud.spanner.DatabaseClient; import com.google.cloud.spanner.Mutation; import com.google.cloud.spanner.ReadContext; import com.google.cloud.spanner.TransactionContext; import com.google.cloud.spanner.TransactionRunner.TransactionCallable; /** * * @author loite * */ abstract class AbstractCloudSpannerStatement implements Statement { private DatabaseClient dbClient; /** * Flag to indicate that this statement should use a SingleUseReadContext * regardless whether a transaction is running or not. This is for example * needed for meta data operations (select statements on * INFORMATION_SCHEMA). */ private boolean forceSingleUseReadContext; private boolean closed; private int queryTimeout; private boolean poolable; private boolean closeOnCompletion; private CloudSpannerConnection connection; private int maxRows; AbstractCloudSpannerStatement(CloudSpannerConnection connection, DatabaseClient dbClient) { this.connection = connection; this.dbClient = dbClient; } protected DatabaseClient getDbClient() { return dbClient; } public boolean isForceSingleUseReadContext() { return forceSingleUseReadContext; } public void setForceSingleUseReadContext(boolean forceSingleUseReadContext) { this.forceSingleUseReadContext = forceSingleUseReadContext; } protected ReadContext getReadContext() throws SQLException { if (connection.getAutoCommit() || forceSingleUseReadContext) { return dbClient.singleUse(); } return connection.getTransaction(); } protected int writeMutation(Mutation mutation) throws SQLException { if (connection.isReadOnly()) { throw new SQLException("Connection is in read-only mode. 
Mutations are not allowed"); } if (connection.getAutoCommit()) { dbClient.readWriteTransaction().run(new TransactionCallable<Void>() { @Override public Void run(TransactionContext transaction) throws Exception { transaction.buffer(mutation); return null; } }); } else { connection.getTransaction().buffer(mutation); } return 1; } @Override public <T> T unwrap(Class<T> iface) throws SQLException { return null; } @Override public boolean isWrapperFor(Class<?> iface) throws SQLException { return false; } @Override public boolean isClosed() throws SQLException { return closed; } @Override public void close() throws SQLException { closed = true; } protected void checkClosed() throws SQLException { if (isClosed()) throw new SQLException("Statement is closed"); } @Override public int getMaxFieldSize() throws SQLException { return 0; } @Override public void setMaxFieldSize(int max) throws SQLException { throw new SQLFeatureNotSupportedException(); } @Override public int getMaxRows() throws SQLException { return maxRows; } @Override public void setMaxRows(int max) throws SQLException { this.maxRows = max; } @Override public void setEscapeProcessing(boolean enable) throws SQLException { throw new SQLFeatureNotSupportedException(); } @Override public int getQueryTimeout() throws SQLException { return queryTimeout; } @Override public void setQueryTimeout(int seconds) throws SQLException { queryTimeout = seconds; } @Override public void cancel() throws SQLException { throw new SQLFeatureNotSupportedException(); } @Override public SQLWarning getWarnings() throws SQLException { throw new SQLFeatureNotSupportedException(); } @Override public void clearWarnings() throws SQLException { throw new SQLFeatureNotSupportedException(); } @Override public void setCursorName(String name) throws SQLException { throw new SQLFeatureNotSupportedException(); } @Override public void setFetchDirection(int direction) throws SQLException { throw new SQLFeatureNotSupportedException(); } @Override public int getFetchDirection() throws SQLException { return ResultSet.FETCH_FORWARD; } @Override public void setFetchSize(int rows) throws SQLException { // silently ignore } @Override public int getFetchSize() throws SQLException { return 0; } @Override public int getResultSetConcurrency() throws SQLException { return ResultSet.CONCUR_READ_ONLY; } @Override public int getResultSetType() throws SQLException { return ResultSet.TYPE_FORWARD_ONLY; } @Override public void addBatch(String sql) throws SQLException { throw new SQLFeatureNotSupportedException(); } @Override public void clearBatch() throws SQLException { throw new SQLFeatureNotSupportedException(); } @Override public int[] executeBatch() throws SQLException { throw new SQLFeatureNotSupportedException(); } @Override public CloudSpannerConnection getConnection() throws SQLException { return connection; } @Override public ResultSet getGeneratedKeys() throws SQLException { throw new SQLFeatureNotSupportedException(); } @Override public int getResultSetHoldability() throws SQLException { return ResultSet.HOLD_CURSORS_OVER_COMMIT; } @Override public void setPoolable(boolean poolable) throws SQLException { this.poolable = poolable; } @Override public boolean isPoolable() throws SQLException { return poolable; } @Override public void closeOnCompletion() throws SQLException { closeOnCompletion = true; } @Override public boolean isCloseOnCompletion() throws SQLException { return closeOnCompletion; } }
src/main/java/nl/topicus/jdbc/statement/AbstractCloudSpannerStatement.java
package nl.topicus.jdbc.statement; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.SQLFeatureNotSupportedException; import java.sql.SQLWarning; import java.sql.Statement; import nl.topicus.jdbc.CloudSpannerConnection; import com.google.cloud.spanner.DatabaseClient; import com.google.cloud.spanner.Mutation; import com.google.cloud.spanner.ReadContext; import com.google.cloud.spanner.TransactionContext; import com.google.cloud.spanner.TransactionRunner.TransactionCallable; /** * * @author loite * */ abstract class AbstractCloudSpannerStatement implements Statement { private DatabaseClient dbClient; /** * Flag to indicate that this statement should use a SingleUseReadContext * regardless whether a transaction is running or not. This is for example * needed for meta data operations (select statements on * INFORMATION_SCHEMA). */ private boolean forceSingleUseReadContext; private boolean closed; private int queryTimeout; private boolean poolable; private boolean closeOnCompletion; private CloudSpannerConnection connection; private int maxRows; AbstractCloudSpannerStatement(CloudSpannerConnection connection, DatabaseClient dbClient) { this.connection = connection; this.dbClient = dbClient; } protected DatabaseClient getDbClient() { return dbClient; } public boolean isForceSingleUseReadContext() { return forceSingleUseReadContext; } public void setForceSingleUseReadContext(boolean forceSingleUseReadContext) { this.forceSingleUseReadContext = forceSingleUseReadContext; } protected ReadContext getReadContext() throws SQLException { if (connection.getAutoCommit() || forceSingleUseReadContext) { return dbClient.singleUse(); } return connection.getTransaction(); } protected int writeMutation(Mutation mutation) throws SQLException { if (connection.isReadOnly()) { throw new SQLException("Connection is in read-only mode. 
Mutations are not allowed"); } if (connection.getAutoCommit()) { dbClient.readWriteTransaction().run(new TransactionCallable<Void>() { @Override public Void run(TransactionContext transaction) throws Exception { transaction.buffer(mutation); return null; } }); } else { connection.getTransaction().buffer(mutation); } return 1; } @Override public <T> T unwrap(Class<T> iface) throws SQLException { return null; } @Override public boolean isWrapperFor(Class<?> iface) throws SQLException { return false; } @Override public boolean isClosed() throws SQLException { return closed; } @Override public void close() throws SQLException { closed = true; } protected void checkClosed() throws SQLException { if (isClosed()) throw new SQLException("Statement is closed"); } @Override public int getMaxFieldSize() throws SQLException { return 0; } @Override public void setMaxFieldSize(int max) throws SQLException { throw new SQLFeatureNotSupportedException(); } @Override public int getMaxRows() throws SQLException { return maxRows; } @Override public void setMaxRows(int max) throws SQLException { this.maxRows = max; } @Override public void setEscapeProcessing(boolean enable) throws SQLException { throw new SQLFeatureNotSupportedException(); } @Override public int getQueryTimeout() throws SQLException { return queryTimeout; } @Override public void setQueryTimeout(int seconds) throws SQLException { queryTimeout = seconds; } @Override public void cancel() throws SQLException { throw new SQLFeatureNotSupportedException(); } @Override public SQLWarning getWarnings() throws SQLException { throw new SQLFeatureNotSupportedException(); } @Override public void clearWarnings() throws SQLException { throw new SQLFeatureNotSupportedException(); } @Override public void setCursorName(String name) throws SQLException { throw new SQLFeatureNotSupportedException(); } @Override public void setFetchDirection(int direction) throws SQLException { throw new SQLFeatureNotSupportedException(); } @Override public int getFetchDirection() throws SQLException { return ResultSet.FETCH_FORWARD; } @Override public void setFetchSize(int rows) throws SQLException { throw new SQLFeatureNotSupportedException(); } @Override public int getFetchSize() throws SQLException { return 0; } @Override public int getResultSetConcurrency() throws SQLException { return ResultSet.CONCUR_READ_ONLY; } @Override public int getResultSetType() throws SQLException { return ResultSet.TYPE_FORWARD_ONLY; } @Override public void addBatch(String sql) throws SQLException { throw new SQLFeatureNotSupportedException(); } @Override public void clearBatch() throws SQLException { throw new SQLFeatureNotSupportedException(); } @Override public int[] executeBatch() throws SQLException { throw new SQLFeatureNotSupportedException(); } @Override public CloudSpannerConnection getConnection() throws SQLException { return connection; } @Override public ResultSet getGeneratedKeys() throws SQLException { throw new SQLFeatureNotSupportedException(); } @Override public int getResultSetHoldability() throws SQLException { return ResultSet.HOLD_CURSORS_OVER_COMMIT; } @Override public void setPoolable(boolean poolable) throws SQLException { this.poolable = poolable; } @Override public boolean isPoolable() throws SQLException { return poolable; } @Override public void closeOnCompletion() throws SQLException { closeOnCompletion = true; } @Override public boolean isCloseOnCompletion() throws SQLException { return closeOnCompletion; } }
silently ignore fetchSize
src/main/java/nl/topicus/jdbc/statement/AbstractCloudSpannerStatement.java
silently ignore fetchSize
<ide><path>rc/main/java/nl/topicus/jdbc/statement/AbstractCloudSpannerStatement.java
<ide> @Override
<ide> public void setFetchSize(int rows) throws SQLException
<ide> {
<del> throw new SQLFeatureNotSupportedException();
<add> // silently ignore
<ide> }
<ide>
<ide> @Override
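JDBC treats the fetch size as a hint, so a driver may legitimately ignore it; throwing SQLFeatureNotSupportedException from setFetchSize(), as the old code did, breaks tools and frameworks that set the hint unconditionally. A small caller-side sketch in plain JDBC follows (it assumes some DataSource is available; nothing here is taken from the spanner-jdbc sources beyond the changed behaviour of setFetchSize).

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import javax.sql.DataSource;

class FetchSizeExample {

    void readAll(DataSource dataSource) throws SQLException {
        try (Connection connection = dataSource.getConnection();
             Statement statement = connection.createStatement()) {
            // A pure hint: after the commit above this call is a no-op instead of an error.
            statement.setFetchSize(100);
            try (ResultSet rs = statement.executeQuery("SELECT 1")) {
                while (rs.next()) {
                    System.out.println(rs.getInt(1));
                }
            }
        }
    }
}

Silently ignoring the hint keeps the driver usable from generic database tooling, which rarely guards setFetchSize() calls with feature checks.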
Java
apache-2.0
853b7ec9f4bdbe88233282bbf59f06fac71a8eb1
0
azureplus/spring-flex,josebarragan/spring-flex,josebarragan/spring-flex,josebarragan/spring-flex,azureplus/spring-flex,spring-projects/spring-flex,spring-projects/spring-flex,azureplus/spring-flex,spring-projects/spring-flex
/* * Copyright 2002-2009 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.flex.security3; import java.security.Principal; import java.util.ArrayList; import java.util.List; import java.util.Map; import javax.servlet.ServletConfig; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.springframework.beans.factory.InitializingBean; import org.springframework.flex.config.MessageBrokerConfigProcessor; import org.springframework.flex.core.MessageBrokerFactoryBean; import org.springframework.security.authentication.AuthenticationManager; import org.springframework.security.authentication.UsernamePasswordAuthenticationToken; import org.springframework.security.core.Authentication; import org.springframework.security.core.AuthenticationException; import org.springframework.security.core.GrantedAuthority; import org.springframework.security.core.context.SecurityContextHolder; import org.springframework.security.web.authentication.NullRememberMeServices; import org.springframework.security.web.authentication.RememberMeServices; import org.springframework.security.web.authentication.logout.LogoutHandler; import org.springframework.security.web.authentication.session.NullAuthenticatedSessionStrategy; import org.springframework.security.web.authentication.session.SessionAuthenticationStrategy; import org.springframework.util.Assert; import org.springframework.util.ClassUtils; import flex.messaging.FlexContext; import flex.messaging.io.MessageIOConstants; import flex.messaging.security.LoginCommand; /** * Custom BlazeDS {@link LoginCommand} that uses Spring Security for Authentication and Authorization. * * <p> * Should be configured as a Spring bean and given a reference to the current {@link AuthenticationManager}. It must be * added to the {@link MessageBrokerFactoryBean}'s list of {@link MessageBrokerConfigProcessor}s. * * <p> * Will be configured automatically when using the <code>secured</code> tag in the xml config namespace. 
* * @author Jeremy Grelle * * @see org.springframework.flex.core.MessageBrokerFactoryBean */ public class SpringSecurityLoginCommand implements LoginCommand, InitializingBean { private final AuthenticationManager authManager; private List<LogoutHandler> logoutHandlers; private RememberMeServices rememberMeServices; private SessionAuthenticationStrategy sessionStrategy; private boolean perClientAuthentication = false; /** * Creates a new SpringSecurityLoginCommand with the provided {@link AuthenticationManager} * * @param authManager the authentication manager */ public SpringSecurityLoginCommand(AuthenticationManager authManager) { Assert.notNull(authManager, "AuthenticationManager is required."); this.authManager = authManager; } public void afterPropertiesSet() throws Exception { if (this.sessionStrategy == null) { this.sessionStrategy = new NullAuthenticatedSessionStrategy(); } if (this.rememberMeServices == null) { this.rememberMeServices = new NullRememberMeServices(); } if (this.logoutHandlers == null) { this.logoutHandlers = new ArrayList<LogoutHandler>(); } if (ClassUtils.isAssignableValue(LogoutHandler.class, this.rememberMeServices) && !this.logoutHandlers.contains(this.rememberMeServices)) { this.logoutHandlers.add((LogoutHandler) this.rememberMeServices); } } /** * * {@inheritDoc} */ public Principal doAuthentication(String username, Object credentials) { HttpServletRequest request = FlexContext.getHttpRequest(); HttpServletResponse response = FlexContext.getHttpResponse(); try { Authentication authentication = this.authManager.authenticate(new UsernamePasswordAuthenticationToken(username, extractPassword(credentials))); if (authentication != null) { if (!isPerClientAuthentication() && request != null && response != null) { this.sessionStrategy.onAuthentication(authentication, request, response); this.rememberMeServices.loginSuccess(request, response, authentication); } SecurityContextHolder.getContext().setAuthentication(authentication); } return authentication; } catch (AuthenticationException ex) { SecurityContextHolder.clearContext(); if (request != null && response != null && !isPerClientAuthentication()) { this.rememberMeServices.loginFail(request, response); } throw ex; } } /** * * {@inheritDoc} */ @SuppressWarnings("rawtypes") public boolean doAuthorization(Principal principal, List roles) { Assert.isInstanceOf(Authentication.class, principal, "This LoginCommand expects a Principal of type " + Authentication.class.getName()); Authentication auth = (Authentication) principal; if (auth == null || auth.getPrincipal() == null || auth.getAuthorities() == null) { return false; } for (GrantedAuthority grantedAuthority : auth.getAuthorities()) { if (roles.contains(grantedAuthority.getAuthority())) { return true; } } return false; } /** * Returns the Spring Security {@link AuthenticationManager} * * @return the authentication manager */ public AuthenticationManager getAuthManager() { return this.authManager; } /** * Checks whether per-client authentication is enabled * * @return true is per-client authentication is enabled */ public boolean isPerClientAuthentication() { return this.perClientAuthentication; } /** * * {@inheritDoc} */ public boolean logout(Principal principal) { HttpServletRequest request = FlexContext.getHttpRequest(); HttpServletResponse response = FlexContext.getHttpResponse(); Authentication auth = SecurityContextHolder.getContext().getAuthentication(); if (request != null && response != null) { for (LogoutHandler handler : logoutHandlers) { 
handler.logout(request, response, auth); } } else { SecurityContextHolder.clearContext(); } return true; } public void setLogoutHandlers(List<LogoutHandler> logoutHandlers) { this.logoutHandlers = logoutHandlers; } public void setPerClientAuthentication(boolean perClientAuthentication) { this.perClientAuthentication = perClientAuthentication; } public void setRememberMeServices(RememberMeServices rememberMeServices) { this.rememberMeServices = rememberMeServices; } public void setSessionAuthenticationStrategy(SessionAuthenticationStrategy sessionStrategy) { this.sessionStrategy = sessionStrategy; } /** * * {@inheritDoc} */ public void start(ServletConfig config) { // Nothing to do } /** * * {@inheritDoc} */ public void stop() { SecurityContextHolder.clearContext(); } /** * Extracts the password from the Flex client credentials * * @param credentials the Flex client credentials * @return the extracted password */ @SuppressWarnings("rawtypes") protected String extractPassword(Object credentials) { String password = null; if (credentials instanceof String) { password = (String) credentials; } else if (credentials instanceof Map) { password = (String) ((Map) credentials).get(MessageIOConstants.SECURITY_CREDENTIALS); } return password; } }
spring-flex-core/src/main/java/org/springframework/flex/security3/SpringSecurityLoginCommand.java
/* * Copyright 2002-2009 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.flex.security3; import java.security.Principal; import java.util.ArrayList; import java.util.List; import java.util.Map; import javax.servlet.ServletConfig; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.springframework.beans.factory.InitializingBean; import org.springframework.flex.config.MessageBrokerConfigProcessor; import org.springframework.flex.core.MessageBrokerFactoryBean; import org.springframework.security.authentication.AuthenticationManager; import org.springframework.security.authentication.UsernamePasswordAuthenticationToken; import org.springframework.security.core.Authentication; import org.springframework.security.core.AuthenticationException; import org.springframework.security.core.GrantedAuthority; import org.springframework.security.core.context.SecurityContextHolder; import org.springframework.security.web.authentication.NullRememberMeServices; import org.springframework.security.web.authentication.RememberMeServices; import org.springframework.security.web.authentication.logout.LogoutHandler; import org.springframework.security.web.authentication.session.NullAuthenticatedSessionStrategy; import org.springframework.security.web.authentication.session.SessionAuthenticationStrategy; import org.springframework.util.Assert; import org.springframework.util.ClassUtils; import flex.messaging.FlexContext; import flex.messaging.io.MessageIOConstants; import flex.messaging.security.LoginCommand; /** * Custom BlazeDS {@link LoginCommand} that uses Spring Security for Authentication and Authorization. * * <p> * Should be configured as a Spring bean and given a reference to the current {@link AuthenticationManager}. It must be * added to the {@link MessageBrokerFactoryBean}'s list of {@link MessageBrokerConfigProcessor}s. * * <p> * Will be configured automatically when using the <code>secured</code> tag in the xml config namespace. 
* * @author Jeremy Grelle * * @see org.springframework.flex.core.MessageBrokerFactoryBean */ public class SpringSecurityLoginCommand implements LoginCommand, InitializingBean { private final AuthenticationManager authManager; private List<LogoutHandler> logoutHandlers; private RememberMeServices rememberMeServices; private SessionAuthenticationStrategy sessionStrategy; private boolean perClientAuthentication = false; /** * Creates a new SpringSecurityLoginCommand with the provided {@link AuthenticationManager} * * @param authManager the authentication manager */ public SpringSecurityLoginCommand(AuthenticationManager authManager) { Assert.notNull(authManager, "AuthenticationManager is required."); this.authManager = authManager; } public void afterPropertiesSet() throws Exception { if (this.sessionStrategy == null) { this.sessionStrategy = new NullAuthenticatedSessionStrategy(); } if (this.rememberMeServices == null) { this.rememberMeServices = new NullRememberMeServices(); } if (this.logoutHandlers == null) { this.logoutHandlers = new ArrayList<LogoutHandler>(); } if (ClassUtils.isAssignableValue(LogoutHandler.class, this.rememberMeServices) && !this.logoutHandlers.contains(this.rememberMeServices)) { this.logoutHandlers.add((LogoutHandler) this.rememberMeServices); } } /** * * {@inheritDoc} */ public Principal doAuthentication(String username, Object credentials) { HttpServletRequest request = FlexContext.getHttpRequest(); HttpServletResponse response = FlexContext.getHttpResponse(); try { Authentication authentication = this.authManager.authenticate(new UsernamePasswordAuthenticationToken(username, extractPassword(credentials))); SecurityContextHolder.getContext().setAuthentication(authentication); if (authentication != null && !isPerClientAuthentication()) { if (request != null && response != null) { this.sessionStrategy.onAuthentication(authentication, request, response); this.rememberMeServices.loginSuccess(request, response, authentication); } } return authentication; } catch (AuthenticationException ex) { if (request != null && response != null && !isPerClientAuthentication()) { this.rememberMeServices.loginFail(request, response); } throw ex; } } /** * * {@inheritDoc} */ @SuppressWarnings("rawtypes") public boolean doAuthorization(Principal principal, List roles) { Assert.isInstanceOf(Authentication.class, principal, "This LoginCommand expects a Principal of type " + Authentication.class.getName()); Authentication auth = (Authentication) principal; if (auth == null || auth.getPrincipal() == null || auth.getAuthorities() == null) { return false; } for (GrantedAuthority grantedAuthority : auth.getAuthorities()) { if (roles.contains(grantedAuthority.getAuthority())) { return true; } } return false; } /** * Returns the Spring Security {@link AuthenticationManager} * * @return the authentication manager */ public AuthenticationManager getAuthManager() { return this.authManager; } /** * Checks whether per-client authentication is enabled * * @return true is per-client authentication is enabled */ public boolean isPerClientAuthentication() { return this.perClientAuthentication; } /** * * {@inheritDoc} */ public boolean logout(Principal principal) { HttpServletRequest request = FlexContext.getHttpRequest(); HttpServletResponse response = FlexContext.getHttpResponse(); Authentication auth = SecurityContextHolder.getContext().getAuthentication(); if (request != null && response != null) { for (LogoutHandler handler : logoutHandlers) { handler.logout(request, response, auth); } } else { 
SecurityContextHolder.clearContext(); } return true; } public void setLogoutHandlers(List<LogoutHandler> logoutHandlers) { this.logoutHandlers = logoutHandlers; } public void setPerClientAuthentication(boolean perClientAuthentication) { this.perClientAuthentication = perClientAuthentication; } public void setRememberMeServices(RememberMeServices rememberMeServices) { this.rememberMeServices = rememberMeServices; } public void setSessionAuthenticationStrategy(SessionAuthenticationStrategy sessionStrategy) { this.sessionStrategy = sessionStrategy; } /** * * {@inheritDoc} */ public void start(ServletConfig config) { // Nothing to do } /** * * {@inheritDoc} */ public void stop() { SecurityContextHolder.clearContext(); } /** * Extracts the password from the Flex client credentials * * @param credentials the Flex client credentials * @return the extracted password */ @SuppressWarnings("rawtypes") protected String extractPassword(Object credentials) { String password = null; if (credentials instanceof String) { password = (String) credentials; } else if (credentials instanceof Map) { password = (String) ((Map) credentials).get(MessageIOConstants.SECURITY_CREDENTIALS); } return password; } }
Fix for FLEX-193 - Concurrency control doesn't work as expected
spring-flex-core/src/main/java/org/springframework/flex/security3/SpringSecurityLoginCommand.java
Fix for FLEX-193 - Concurrency control doesn't work as expected
<ide><path>pring-flex-core/src/main/java/org/springframework/flex/security3/SpringSecurityLoginCommand.java
<ide> HttpServletResponse response = FlexContext.getHttpResponse();
<ide> try {
<ide> Authentication authentication = this.authManager.authenticate(new UsernamePasswordAuthenticationToken(username, extractPassword(credentials)));
<del> SecurityContextHolder.getContext().setAuthentication(authentication);
<del> if (authentication != null && !isPerClientAuthentication()) {
<del> if (request != null && response != null) {
<add> if (authentication != null) {
<add> if (!isPerClientAuthentication() && request != null && response != null) {
<ide> this.sessionStrategy.onAuthentication(authentication, request, response);
<ide> this.rememberMeServices.loginSuccess(request, response, authentication);
<ide> }
<add> SecurityContextHolder.getContext().setAuthentication(authentication);
<ide> }
<ide> return authentication;
<ide> } catch (AuthenticationException ex) {
<add> SecurityContextHolder.clearContext();
<ide> if (request != null && response != null && !isPerClientAuthentication()) {
<ide> this.rememberMeServices.loginFail(request, response);
<ide> }
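The reordering in this diff is what makes concurrent-session control (FLEX-193) effective: the SessionAuthenticationStrategy must run, and be allowed to reject the login, before the Authentication is published to the SecurityContext, and the context has to be cleared again whenever authentication fails. A stripped-down sketch of that ordering with plain Spring Security types follows; AuthenticationService and its wiring are illustrative, not part of spring-flex.

import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import org.springframework.security.authentication.AuthenticationManager;
import org.springframework.security.authentication.UsernamePasswordAuthenticationToken;
import org.springframework.security.core.Authentication;
import org.springframework.security.core.AuthenticationException;
import org.springframework.security.core.context.SecurityContextHolder;
import org.springframework.security.web.authentication.session.SessionAuthenticationStrategy;

class AuthenticationService {

    private final AuthenticationManager authenticationManager;
    private final SessionAuthenticationStrategy sessionStrategy;

    AuthenticationService(AuthenticationManager authenticationManager,
                          SessionAuthenticationStrategy sessionStrategy) {
        this.authenticationManager = authenticationManager;
        this.sessionStrategy = sessionStrategy;
    }

    Authentication login(String username, String password,
                         HttpServletRequest request, HttpServletResponse response) {
        try {
            Authentication authentication = authenticationManager.authenticate(
                    new UsernamePasswordAuthenticationToken(username, password));
            // 1. Let the strategy veto or adjust the session (concurrency control,
            //    session fixation protection) while the context is still empty.
            sessionStrategy.onAuthentication(authentication, request, response);
            // 2. Only now publish the result to the thread-local security context.
            SecurityContextHolder.getContext().setAuthentication(authentication);
            return authentication;
        } catch (AuthenticationException ex) {
            // 3. Never leave a stale Authentication behind on failure.
            SecurityContextHolder.clearContext();
            throw ex;
        }
    }
}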
JavaScript
mit
24b5d46b08160fb042637609016b3c3c2e904710
0
LudoZeGeek/Resor,LudoZeGeek/Resor,LudoZeGeek/Resor
(function ($, top, GMaps, angular) { $(function () { var changePosition = $('.js-change-position'), positionInput = $('.js-position-place'), latInput = $('.js-lat'), lngInput = $('.js-lng'), map; map = new GMaps({ div: '#map', lat: +(latInput.val()), lng: +(lngInput.val()) }); }); }(jQuery, window, GMaps, angular)); (function () { var app = angular.module("resultsApp", []); app.filter("capitalize", function () { return function (s) { return s[0].toUpperCase() + s.slice(1); }; }); var lorem = "Lorem ipsum dolor sit amet, consectetur adipiscing elit. Fusce in dignissim ex. Ut efficitur libero sed ipsum laoreet, a laoreet nunc consequat. Vestibulum mollis quis dui non commodo. Donec nec neque id nulla volutpat maximus. Nulla facilisi. Nullam ultricies lacinia diam, nec consectetur massa congue ac. Phasellus scelerisque at enim sed rhoncus. Ut ut ex leo. Nunc eu libero leo. Phasellus placerat luctus interdum. Duis efficitur laoreet dolor, malesuada rutrum neque congue sit amet. Aliquam semper arcu sapien. "; app.controller('ResultCtrl', function ($scope) { $scope.results = [ { title: "Camping des flots bleus", description: lorem, price: "34", features: ["pool", "animals", "spa", "jacuzzi"] }, { title: "Camping de l'océan", description: lorem, price: "27", features: ["spa", "jacuzzi"] }, { title: "Camping de la plage", description: lorem, price: "39", features: ["pool", "spa", "jacuzzi"] }, { title: "Camping du soleil", description: lorem, price: "42", features: ["pool", "animals"] }, { title: "Camping Serge", description: lorem, price: "19", features: ["spa", "jacuzzi"] } ]; $scope.filters = _.map(_.reduce(_.map($scope.results, function (result) { return result.features; }), function (memo, curr) { return _.union(memo, curr); }, []), function (filter) { return { name: filter, on: false }; }); $scope.filterByFeatures = function (result) { return $scope.filters.reduce(function(memo, filter){ return memo && !(filter.on && result.features.indexOf(filter.name) < 0) }, true); }; }); }());
src/Resor/Bundle/CoreBundle/Resources/public/js/results.js
(function ($, top, GMaps, angular) { $(function () { var changePosition = $('.js-change-position'), positionInput = $('.js-position-place'), latInput = $('.js-lat'), lngInput = $('.js-lng'), map; map = new GMaps({ div: '#map', lat: +(latInput.val()), lng: +(lngInput.val()) }); }); }(jQuery, window, GMaps, angular)); (function () { var app = angular.module("resultsApp", []); app.filter("capitalize", function () { return function (s) { return s[0].toUpperCase() + s.slice(1); }; }); var lorem = "Lorem ipsum dolor sit amet, consectetur adipiscing elit. Fusce in dignissim ex. Ut efficitur libero sed ipsum laoreet, a laoreet nunc consequat. Vestibulum mollis quis dui non commodo. Donec nec neque id nulla volutpat maximus. Nulla facilisi. Nullam ultricies lacinia diam, nec consectetur massa congue ac. Phasellus scelerisque at enim sed rhoncus. Ut ut ex leo. Nunc eu libero leo. Phasellus placerat luctus interdum. Duis efficitur laoreet dolor, malesuada rutrum neque congue sit amet. Aliquam semper arcu sapien. "; app.controller('ResultCtrl', function ($scope) { $scope.results = [ { title: "Camping des flots bleus", description: lorem, price: "34", features: ["pool", "animals", "spa", "jacuzzi"] }, { title: "Camping de l'océan", description: lorem, price: "27", features: ["spa", "jacuzzi"] }, { title: "Camping de la plage", description: lorem, price: "39", features: ["pool", "spa", "jacuzzi"] }, { title: "Camping du soleil", description: lorem, price: "42", features: ["pool", "animals"] }, { title: "Camping Serge", description: lorem, price: "19", features: ["spa", "jacuzzi"] } ]; $scope.filters = [ { name: "pool", on: true }, { name: "spa", on: false }, { name: "jacuzzi", on: false }, { name: "animals", on: false }, { name: "children", on: false } ]; $scope.filterByFeatures = function (result) { return $scope.filters.reduce(function(memo, filter){ return memo && !(filter.on && result.features.indexOf(filter.name) < 0) }, true); }; }); }());
#3 - Get filters from available filters
src/Resor/Bundle/CoreBundle/Resources/public/js/results.js
#3 - Get filters from available filters
<ide><path>rc/Resor/Bundle/CoreBundle/Resources/public/js/results.js
<ide> }
<ide> ];
<ide>
<del> $scope.filters = [
<del> {
<del> name: "pool",
<del> on: true
<del> },
<del> {
<del> name: "spa",
<add> $scope.filters = _.map(_.reduce(_.map($scope.results, function (result) {
<add> return result.features;
<add> }), function (memo, curr) {
<add> return _.union(memo, curr);
<add> }, []), function (filter) {
<add> return {
<add> name: filter,
<ide> on: false
<del> },
<del> {
<del> name: "jacuzzi",
<del> on: false
<del> },
<del> {
<del> name: "animals",
<del> on: false
<del> },
<del> {
<del> name: "children",
<del> on: false
<del> }
<del> ];
<add> };
<add> });
<ide>
<ide> $scope.filterByFeatures = function (result) {
<ide> return $scope.filters.reduce(function(memo, filter){
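The underscore.js chain in this diff replaces the hard-coded filter array with one derived from the data: map every result to its feature list, reduce those lists to their union, then turn each distinct feature into a filter that starts switched off. The same idea expressed in Java streams, purely as an illustration (Result and Filter are made-up types; the Resor project itself is JavaScript):

import java.util.List;
import java.util.stream.Collectors;

class FilterBuilder {

    record Result(String title, List<String> features) { }

    record Filter(String name, boolean on) { }

    static List<Filter> filtersFrom(List<Result> results) {
        return results.stream()
                .flatMap(result -> result.features().stream())
                .distinct()                        // union of all feature names
                .map(name -> new Filter(name, false))
                .collect(Collectors.toList());
    }
}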
Java
bsd-3-clause
66c92c796b487824c7335aec5cb36fea2191d78c
0
NCIP/caadapter,NCIP/caadapter,NCIP/caadapter
/** * <!-- LICENSE_TEXT_START --> The contents of this file are subject to the caAdapter Software License (the "License"). You may obtain a copy of the License at the following location: [caAdapter Home Directory]\docs\caAdapter_license.txt, or at: http://ncicb.nci.nih.gov/infrastructure/cacore_overview/caadapter/indexContent/docs/caAdapter_License * <!-- LICENSE_TEXT_END --> */ package gov.nih.nci.caadapter.common.util; import edu.knu.medinfo.hl7.v2tree.HL7MessageTreeException; import edu.knu.medinfo.hl7.v2tree.MetaDataLoader; import gov.nih.nci.caadapter.common.Log; import gov.nih.nci.caadapter.common.function.DateFunction; import gov.nih.nci.caadapter.common.function.FunctionException; //import gov.nih.nci.caadapter.hl7.mif.NormativeVersionUtil; import javax.swing.*; import java.awt.*; import java.io.*; import java.net.MalformedURLException; import java.net.SocketTimeoutException; import java.net.URL; import java.net.URLConnection; import java.util.*; import java.util.List; import java.util.logging.FileHandler; /** * File related utility class * * @author OWNER: Matthew Giordano * @author LAST UPDATE $Author: altturbo $ * @version $Revision: 1.32 $ */ public class FileUtil { private static final String OUTPUT_DIR_NAME = "out"; private static File OUTPUT_DIR = null; private static File ODI_FILE = null; private static MetaDataLoader v2Loader = null; /** * Create the output directory if it doesn't exist. */ private static void setupOutputDir() { OUTPUT_DIR = new File(OUTPUT_DIR_NAME); if (!OUTPUT_DIR.isDirectory()) { OUTPUT_DIR.mkdir(); } } public static String getAssociatedFileAbsolutePath(String holderFile, String associatedFile) { if(associatedFile.indexOf(File.separator)>-1) return associatedFile; File holder=new File(holderFile); File associted=new File(associatedFile); if (!holder.exists()) return associatedFile; if (holder.isDirectory()) return associatedFile; String holderParent=holder.getParent(); String rntPath=holderParent+File.separator+associatedFile; return rntPath; } /** * Compare the name/absolute path of a holder file with its associated file * return the name of the assoicated file without parentPath if they have the same * parent, otherwise return the input value of the associated file * @param holderFile the name/absolutePath of a holder file * @param associatedFile the name/absolutePath of an associated file * @return name of the associated file to be used as reference from the holder file */ public static String getAssociatedFileRelativePath(String holderFile, String associatedFile) { File holder=new File(holderFile); File associted=new File(associatedFile); if (!holder.exists()) return associatedFile; if (holder.isDirectory()) return associatedFile; if (!associted.exists()) return associatedFile; if (associted.isDirectory()) return associatedFile; String holderParent=holder.getParent(); String associateParent=associted.getParent(); if (!holderParent.equals(associateParent)) return associatedFile; return associted.getName(); } /** * Create the output directory if necessary and return a reference to it. 
* * @return The output directory */ public static File getOutputDir() { FileUtil.setupOutputDir(); return OUTPUT_DIR; } public static String getWorkingDirPath() { File f = new File(""); return f.getAbsolutePath(); } public static String getComponentsDirPath() { return getWorkingDirPath() + File.separator + "components"; } public static String getCommonDirPath() { return getComponentsDirPath() + File.separator + "common"; } public static String getDataViewerDirPath() { return getComponentsDirPath() + File.separator + "dataviewer"; } public static String getHL7TransformationDirPath() { return getComponentsDirPath() + File.separator + "hl7Transformation"; } public static String getModelMappingDirPath() { return getComponentsDirPath() + File.separator + "modelMapping"; } public static String getSDTMTransformationDirPath() { return getComponentsDirPath() + File.separator + "sdtmTransformation"; } public static String getUserInterfaceDirPath() { return getComponentsDirPath() + File.separator + "userInterface"; } public static String getWebServicesDirPath() { return getComponentsDirPath() + File.separator + "webservices"; } public static String getETCDirPath() // inserted bt umkis 08/09/2006 { File f = new File("./etc"); return f.getAbsolutePath(); } public static String getExamplesDirPath() { File f = new File("./workingspace/examples"); return f.getAbsolutePath(); } public static String getV2DataDirPath() { File f = new File(getWorkingDirPath() + File.separator + "data" + File.separator + "v2Meta"); if ((!f.exists())||(!f.isDirectory())) f.mkdirs(); return f.getAbsolutePath(); //return getV2DataDirPath(null); } public static String getV2DataDirPath(Component parent) { File f = new File(getWorkingDirPath() + File.separator + "data" + File.separator + "v2Meta"); if ((!f.exists())||(!f.isDirectory())) { if (parent == null) return null; String display = "HL7 v2 meta Directory isn't created yet.\nPress 'Yes' button if you want to create directory.\nIt may takes some minutes."; //JOptionPane.showMessageDialog(parent, "Making V2 Meta Directory", display, JOptionPane.WARNING_MESSAGE); //System.out.println("CCCCV : " + display); int res = JOptionPane.showConfirmDialog(parent, display, "Create v2 Meta Directory", JOptionPane.YES_NO_OPTION, JOptionPane.INFORMATION_MESSAGE); if (res != JOptionPane.YES_OPTION) return ""; ClassLoaderUtil loaderUtil = null; try { loaderUtil = new ClassLoaderUtil("v2Meta"); } catch(IOException ie) { JOptionPane.showMessageDialog(parent, ie.getMessage() + ".\n Check the resourceV2.zip file in the library.", "Creating V2 Meta Directory failure", JOptionPane.WARNING_MESSAGE); //System.err.println("Make V2 Meta Directory : " + ie.getMessage()); return null; } for(int i=0;i<loaderUtil.getSizeOfFiles();i++) { String path = loaderUtil.getPath(i); if (!path.trim().toLowerCase().endsWith(".dat")) continue; path = path.replace("/", File.separator); path = getWorkingDirPath() + File.separator + "data" + File.separator + path; int index = -1; for (int j=path.length();j>0;j--) { String achar = path.substring(j-1, j); if (achar.equals(File.separator)) { index = j; break; } } //System.out.println("V2 Meta : " + path); if (index <= 0) { JOptionPane.showMessageDialog(parent, "V2 Meta file is invalid : " + path, "Creating V2 Meta Directory failure", JOptionPane.WARNING_MESSAGE); //System.err.println("V2 Meta file is invalid : " + path); return null; } File dir = new File(path.substring(0,(index-1))); if ((!dir.exists())||(!dir.isDirectory())) { if (!dir.mkdirs()) { 
JOptionPane.showMessageDialog(parent, "V2 Meta directory making failure : " + dir, "Creating V2 Meta Directory failure", JOptionPane.WARNING_MESSAGE); //System.err.println("V2 Meta directory making failure : " + dir); return null; } } File datFile = new File(loaderUtil.getFileName(i)); if ((!datFile.exists())||(!datFile.isFile())) { JOptionPane.showMessageDialog(parent, "Not Found This V2 Meta temporary file : " + path, "Creating V2 Meta Directory failure", JOptionPane.WARNING_MESSAGE); continue; } if (!datFile.renameTo(new File(path))) System.err.println("V2 Meta rename failure : " + path); } } return f.getAbsolutePath(); } public static String searchPropertyAsFilePath(String key) { return searchProperty(null, key, false, true); } public static String searchProperty(String key) { return searchProperty(null, key, false, false); } public static String searchProperty(String key, boolean useProperty) { return searchProperty(null, key, useProperty, false); } private static String searchProperty(File dir, String key, boolean useProperty, boolean isFilePath) { File sDir = null; if (dir == null) sDir = new File(getWorkingDirPath()); else { if (!dir.isDirectory()) return null; sDir = dir; } String res = null; File[] files = sDir.listFiles(); for(File file:files) { String fName = file.getName(); if (file.isFile()) { if ((fName.toLowerCase().endsWith(".properties"))|| (fName.toLowerCase().endsWith(".property"))) {} else continue; if (useProperty) { res = getPropertyFromComponentPropertyFile(file.getAbsolutePath(), key); if (res != null) { if (isFilePath) { String path = checkValidFilePath(res); if (path != null) return path; } else return res; } continue; } List<String> list = null; try { list = readFileIntoList(file.getAbsolutePath()); } catch(IOException ie) { continue; } for (String line:list) { line = line.trim(); if (line.startsWith("#")) continue; if (line.startsWith("!")) continue; int idx = line.indexOf("="); if (idx < 0) idx = line.indexOf(":"); if (idx <= 0) continue; String keyS = line.substring(0, idx).trim(); if (!keyS.equals(key.trim())) continue; res = line.substring(idx+1).trim(); if (isFilePath) { String path = checkValidFilePath(res); if (path != null) return path; } else return res; } } else if (file.isDirectory()) { if ((fName.equalsIgnoreCase("conf"))|| (fName.equalsIgnoreCase("etc"))) res = searchProperty(file, key, useProperty, isFilePath); if (res != null) return res; } } res = null; if (sDir.getName().equalsIgnoreCase("dist")) res = searchProperty(sDir.getParentFile(), key, useProperty, isFilePath); if (res != null) return res; return null; } public static String searchFile(String fileName) { return searchFile(null, fileName); } public static String searchFile(File dir, String fileName) { if (fileName == null) return null; fileName = fileName.trim(); if (fileName.equals("")) return null; if (fileName.endsWith(File.separator)) fileName = fileName.substring(0, fileName.length()-File.separator.length()); if (fileName.endsWith("/")) fileName = fileName.substring(0, fileName.length()-1); while(true) { int idx = fileName.indexOf(File.separator); int len = 0; if (idx >= 0) len = File.separator.length(); else { idx = fileName.indexOf("/"); if (idx >= 0) len = 1; } if (idx < 0) break; fileName = fileName.substring(idx + len); } if (dir == null) { File wDir = new File(getWorkingDirPath()); if (wDir.getName().equals("dist")) dir = wDir.getParentFile(); else dir = wDir; } if ((!dir.exists())||(!dir.isDirectory())) return null; File[] files = dir.listFiles(); for(File file:files) { if 
(file.getName().equals(fileName)) return file.getAbsolutePath(); if (file.isDirectory()) { String res = searchFile(file, fileName); if (res != null) return res; } } return null; } public static String checkValidFilePath(String data) { if (data == null) return null; data = data.trim(); if (data.equals("")) return null; while(true) { int idx = data.indexOf("\\\\"); if (idx < 0) break; data = data.substring(0, idx) + data.substring(idx + 1); } File file = new File(data); if (file.exists()) return file.getAbsolutePath(); return null; } /* public static String getV3XsdFilePath() { String schemaPath= NormativeVersionUtil.getCurrentMIFIndex().getSchemaPath(); File f = new File(schemaPath); if (!f.exists()) f = new File("../" + schemaPath); if (!f.exists()) { System.err.println("Not Found V3 XSD Directory..."); return null; } if (f.isDirectory()) return f.getAbsolutePath(); String parent = f.getParent(); if (!parent.endsWith(File.separator)) parent = parent + File.separator; File sdir = new File(parent + "schemas"); if ((sdir.exists())&&(sdir.isDirectory())) return sdir.getAbsolutePath(); System.err.println("Not Found V3 XSD Directory..."); return null; } */ public static MetaDataLoader getV2ResourceMetaDataLoader() { return getV2ResourceMetaDataLoader(null); } public static MetaDataLoader getV2ResourceMetaDataLoader(String resourceFile) { if (resourceFile == null) resourceFile = ""; else resourceFile = resourceFile.trim(); //if (v2Loader == null) System.out.println("CCC v3 meta loader (1) : " + resourceFile); //else System.out.println("CCC v3 meta loader (2) : " + v2Loader.getPath() + ", " + resourceFile); if (!resourceFile.equals("")) { MetaDataLoader loader = null; try { loader = new MetaDataLoader(resourceFile); } catch(HL7MessageTreeException he) { System.out.println("HL7MessageTreeException : " + he.getMessage()); return null; } v2Loader = loader; return loader; } if (v2Loader == null) { String name = "v2Meta/version2.4/MessageStructure/ADT_A01.dat"; Enumeration<URL> fileURLs = null; try { fileURLs= ClassLoader.getSystemResources(name); } catch(IOException ie) { System.out.println("IOException #1 : " + ie.getMessage()); } if (fileURLs == null) { System.out.println("ClassLoader Result : " + name + " : Not Found"); return null; } //System.out.println("Number of Result : " + fileURLs.toString()); boolean found = false; while(fileURLs.hasMoreElements()) { URL fileURL = fileURLs.nextElement(); String url = fileURL.toString(); if ((url.toLowerCase().startsWith("jar:"))||(url.toLowerCase().startsWith("zip:"))) { int idx = url.indexOf("!"); if (idx < 0) { System.err.println("Invalid jar file url : " + url); continue; } String jarFileName = url.substring(4, idx); try { v2Loader = new MetaDataLoader(jarFileName); found = true; } catch(HL7MessageTreeException he) { continue; } } if ((found)&&(v2Loader != null)) return v2Loader; } v2Loader = null; return null; } else return v2Loader; } /** * Copied from javaSig code * SInce 2005 Normative Edition, the name convention is changed. It has to following * HL7 v3 artifact naming convention: {UUDD_AAnnnnnnUVnn}. * - UU = Sub-Section code * - DD = Domain code * - AA = Artifact or Document code. * (Message Type will be MT) - nnnnnn = Six digit zero-filled number * - UV = Universal * - nn = ballot version * * Since the ballot version is different, starting from 01 to unknown, this function find related file * by guessing the version number * * THIS METHOD IS NOT USED BY JAVASIG CODE ANY MORE. 
IF IT WERE, IT WOULD HAVE TO BE * CONVERTED TO NOT USER Files BUT Resources INSTEAD. * TestParseAndBuild uses it, but that's O.K. Just those tests will eventually fail. * * * @param messageType * @param fileExtension * @return File Name */ public static String searchMessageTypeSchemaFileName(String messageType, String fileExtension) throws FileNotFoundException { String schemaFileNamePath =""; for (int i = -1; i < 100; i++) { String pad = ""; if (i < 0) pad = ""; else pad = i < 10 ? "UV0" + i : "UV" + String.valueOf(i); String schemaFileName = Config.SCHEMA_LOCATION+messageType+pad + "." + fileExtension; schemaFileNamePath=FileUtil.getWorkingDirPath() + File.separator + schemaFileName; File file = new File(schemaFileNamePath); if ((file.exists())&&(file.isFile())) return schemaFileName; // return file.getAbsolutePath(); URL fileURL= ClassLoader.getSystemResource(schemaFileName); if (fileURL!=null) return schemaFileName; // return fileURL.getFile(); } //Throw exception since file is not found.... throw new FileNotFoundException("File Directory:" + Config.SCHEMA_LOCATION + " Message Type:" + messageType + " File Extenstion:" + fileExtension + ", "+schemaFileNamePath); } /** * Return a convenient UI Working Directory, which may or may not be the same as the value from getWorkingDirPath(). * @return a convenient UI Working Directory, which may or may not be the same as the value from getWorkingDirPath(). */ public static String getUIWorkingDirectoryPath() { File f = new File("./workingspace"); if ((!f.exists())||(!f.isDirectory())) { f.mkdirs(); } return f.getAbsolutePath(); } /** * Generat a Temporary File Name at workingspace directory. * @return a Temporary File Name. */ public static String getTemporaryFileName() // inserted by umkis 08/09/2006 { return getTemporaryFileName(Config.TEMPORARY_FILE_EXTENSION); } /** * Generat a Temporary File Name at workingspace directory. * @param extension the extention of generated temp file * @return a Temporary File Name. */ public static String getTemporaryFileName(String extension) // inserted by umkis 08/09/2006 { DateFunction dateFunction = new DateFunction(); String dateFormat = dateFunction.getDefaultDateFormatString(); if (!dateFormat.endsWith("SSS")) dateFormat = dateFormat + "SSS"; try { return getUIWorkingDirectoryPath() + File.separator + Config.TEMPORARY_FILE_PREFIX + (new DateFunction()).getCurrentTime(dateFormat) + "_" + getRandomNumber(4) + extension; } catch(FunctionException fe) { return getUIWorkingDirectoryPath() + File.separator + Config.TEMPORARY_FILE_PREFIX + (new DateFunction()).getCurrentTime() + "_" + getRandomNumber(4) + extension; } } /** * Check the parameter whether a temporary file name or not. * * @param fileName * @return true if a temporary file name, else is false. */ public static boolean isTemporaryFileName(String fileName) // inserted by umkis 08/10/2006 { if (fileName.length() > 1024) return false; if ( //(fileName.endsWith(Config.TEMPORARY_FILE_EXTENSION)) && (fileName.indexOf(Config.TEMPORARY_FILE_PREFIX) >= 0) ) return true; else return false; } /** * Create a temporary file which includes the received string parameter. * * @param tempFileName file name of this temporary file. * @param string parameter which would like to be saved into this temporary file. * @throws IOException when saving is failed. 
*/ public static void saveStringIntoTemporaryFile(String tempFileName, String string) throws IOException // inserted by umkis 12/26/2006 { FileWriter fw = null; File file = null; try { fw = new FileWriter(tempFileName); fw.write(string); fw.close(); file = new File(tempFileName); } catch(Exception ie) { throw new IOException("File Writing Error(" + tempFileName + ") : " + ie.getMessage() + ", value : " + string); } file.deleteOnExit(); } public static List<String> readFileIntoList(String fileName) throws IOException { List<String> list = new ArrayList<String>(); FileReader fr = null; try { fr = new FileReader(fileName); } catch(FileNotFoundException fe) { throw new IOException("FileNotFoundException in FileUtil.readFileIntoList() : " + fileName); } BufferedReader br = new BufferedReader(fr); String readLineOfFile = ""; try { while((readLineOfFile=br.readLine())!=null) list.add(readLineOfFile); } catch(IOException ie) { throw new IOException("File reading Error in FileUtil.readFileIntoList() : " + fileName); } try { fr.close(); br.close(); } catch(IOException ie) { throw new IOException("File Closing Error in FileUtil.readFileIntoList() : " + fileName); } return list; } public static String readFileIntoStringAllowException(String fileName) throws IOException { List<String> list = null; list = readFileIntoList(fileName); String output = ""; for(int i=0;i<list.size();i++) output = output + list.get(i) + "\r\n"; return output.trim(); } public static String readFileIntoString(String fileName) { String out = ""; try { out = readFileIntoStringAllowException(fileName); } catch(IOException ie) { return null; } return out; } public static String findODIWithDomainName(String str) throws IOException { if ((str == null)||(str.trim().equals(""))) return ""; if (ODI_FILE == null) { ClassLoaderUtil loaderUtil = new ClassLoaderUtil("instanceGen/HL7_ODI.csv"); if (loaderUtil.getFileNames().size() == 0) throw new IOException("HL7_ODI.csv file class loading failure."); ODI_FILE = new File(loaderUtil.getFileNames().get(0)); ODI_FILE.deleteOnExit(); } FileReader fr = null; //String fileName = ODI_FILE_NAME; try { fr = new FileReader(ODI_FILE); } catch(FileNotFoundException fe) { throw new IOException("ODI File : FileNotFoundException in FileUtil.readFileIntoList() : " + ODI_FILE.getName()); } BufferedReader br = new BufferedReader(fr); String readLineOfFile = ""; String result = ""; try { while((readLineOfFile=br.readLine())!=null) { //System.out.println("CCCYYYY : " + readLineOfFile); if ((readLineOfFile.startsWith("1."))||(readLineOfFile.startsWith("2."))||(readLineOfFile.startsWith("3."))||(readLineOfFile.startsWith("4."))) {} else continue; StringTokenizer st = new StringTokenizer(readLineOfFile, ","); int n = 0; String odi = ""; String domainName = ""; while(st.hasMoreTokens()) { if (n == 0) odi = st.nextToken().trim(); if (n == 1) domainName = st.nextToken().trim(); if (n == 2) break; n++; } //System.out.println("CCCXX : " + str + ", " + domainName + ", " + odi); if (str.trim().equals(domainName)) { result = odi; break; } } } catch(IOException ie) { throw new IOException("ODI File reading Error in FileUtil.readFileIntoList() : " + ODI_FILE.getName()); } try { fr.close(); br.close(); } catch(IOException ie) { throw new IOException("ODI File Closing Error in FileUtil.readFileIntoList() : " + ODI_FILE.getName()); } return result; } /** * Create a random integer number with digit number which was given by the caller. * For example, when digit number is 5, return value can be 34562 or 98123. 
* @param digit number of generated random number * @return generated random number */ public static int getRandomNumber(int digit) // inserted by umkis 08/10/2006 { if (digit <= 0) return 0; int in = 1; int la = 0; int sa = 0; for(int x=0;x<digit;x++) { in = in * 10; } la = in; sa = la / 10; return getRandomNumber(sa, la); } /** * Create a random integer number between max and min number which was given by the caller. * @param min : number of generated minimum random number * @param max : number of generated maximum random number * @return generated random number */ public static int getRandomNumber(int min, int max) // inserted by umkis 06/13/2007 { if (min == max) return max; if (min > max) { int t = max; max = min; min = t; } Random rnd = new Random(); int in = 0; int in1 = 0; if (max <= 0) { int min1 = 0 - max; int max1 = 0 - min; while(true) { in = rnd.nextInt(); in1 = in % max1; if (in1 >= min1) break; } in1 = 0 - in1; } else { while(true) { in = rnd.nextInt(); in1 = in % max; if (in1 >= min) break; } } return in1; } /** * Delete a lck file from the output directory. A lck file is a temporary file that is * created by the logger. * * @param filename */ public static void deleteLckFile(String filename) { File lckFile = new File(FileUtil.getOutputDir().getAbsolutePath() + File.separator + filename + ".lck"); if (lckFile != null && lckFile.delete()) { // do nothing } else { // lck file couldn't be deleted. } } public static String outputFile(String filename, String data) throws IOException { String fileName = FileUtil.getOutputDir().getAbsolutePath() + File.separator + filename; FileWriter out = new FileWriter(fileName); out.write(data); out.flush(); out.close(); return fileName; } /** * Create a filehandler to a log file that is located in the output directory. * * @param fileName the log that you want to create * @return the filehandler * @throws IOException */ public static FileHandler getLogFileHandle(String fileName) throws IOException { return new FileHandler(FileUtil.getOutputDir().getAbsolutePath() + File.separator + fileName); } /** * Search for a file by searching the classpath * (calling ClassLoader.getSystemResource()). * * @param fileName Name of the file you are looking for. * @return the path to the file * @throws FileNotFoundException */ public static String fileLocateOnClasspath(String fileName) throws FileNotFoundException { if (fileName.startsWith(Config.CAADAPTER_HOME_DIR_TAG)) fileName = fileName.replace(Config.CAADAPTER_HOME_DIR_TAG, getWorkingDirPath()); File f = new File(fileName); if (f.exists()) { return f.getAbsolutePath(); } URL u = null; u = ClassLoader.getSystemResource(fileName); if (u == null) { throw new FileNotFoundException(fileName + " - make sure the file is on the classpath."); } else { return u.getFile(); } } public static File fileLocate(String directory, String fileName)throws FileNotFoundException{ return new File(FileUtil.filenameLocate(directory,fileName)); } /** * Search for a file at the specified location and if it's not * found there look on the classpath by calling filenameLocate(fileName). 
* * @param directory the directory to look first * @param fileName the name fo the file * @return the path to the file * @throws FileNotFoundException */ public static String filenameLocate(String directory, String fileName) throws FileNotFoundException { if ((fileName == null)||(fileName.trim().equals(""))) throw new FileNotFoundException("Null file name..."); else fileName = fileName.trim(); if (fileName.startsWith(Config.CAADAPTER_HOME_DIR_TAG)) fileName = fileName.replace(Config.CAADAPTER_HOME_DIR_TAG, getWorkingDirPath()); // check just the filename File f = new File(fileName); if (f.exists()) { return f.getAbsolutePath(); } if ((directory == null)||(directory.trim().equals(""))) throw new FileNotFoundException("Null Dirctory..."); else directory = directory.trim(); if (directory.startsWith(Config.CAADAPTER_HOME_DIR_TAG)) directory = directory.replace(Config.CAADAPTER_HOME_DIR_TAG, getWorkingDirPath()); if (!directory.endsWith(File.separator)) directory = directory + File.separator; // check directory + filename f = new File(directory + fileName); if (f.exists()) { return f.getAbsolutePath(); } String temp = fileName; if (fileName.endsWith(File.separator)) fileName = fileName.substring(0, fileName.length()-File.separator.length()); if (fileName.endsWith("/")) fileName = fileName.substring(0, fileName.length()-1); while(true) { int idx = fileName.indexOf(File.separator); int len = 0; if (idx >= 0) len = File.separator.length(); else { idx = fileName.indexOf("/"); if (idx >= 0) len = 1; } if (idx < 0) break; fileName = fileName.substring(idx + len); } f = new File(directory + fileName); if (f.exists()) { //System.out.println("DDD : " + temp +" ; "+ fileName +" ; "+ f.getAbsolutePath()); return f.getAbsolutePath(); } fileName = temp; String fileLocation = null; try { fileLocation = fileLocateOnClasspath(fileName); } catch (FileNotFoundException fnfe) { throw new FileNotFoundException(fileName + " - make sure " + "the location is correct OR the file is on the classpath"); } return fileLocation; } public static String getLogFilename(String fullFilename) { if (fullFilename.startsWith(Config.CAADAPTER_HOME_DIR_TAG)) fullFilename = fullFilename.replace(Config.CAADAPTER_HOME_DIR_TAG, getWorkingDirPath()); String justFileName = new File(fullFilename).getName(); return justFileName + ".log"; } /** * Return the extension part given file name. * For example, if the name of the file is "foo.bar", ".bar" will be returned * if includeDelimiter is true, or "bar" will be returned if includeDelimiter is false; * otherwise, if no extension is specified in the file name, empty string is * returned instead of null. * * @param file * @param includeDelimiter * @return the extension or an empty string if nothing is found */ public static final String getFileExtension(File file, boolean includeDelimiter) { String result = ""; if (file != null) { String absoluteName = file.getAbsolutePath(); if (absoluteName != null) { int delimIndex = absoluteName.lastIndexOf("."); if (delimIndex != -1) {//include the . delimiter if (!includeDelimiter) {//skip the . delimiter delimIndex++; } result = absoluteName.substring(delimIndex); } } } return result; } /** * Construct a list of V3 Message file names and return. * @param userSpecifiedFile * @param numberOfMessages * @param extension * @param extensionIncludesDelimiter * @return a list of V3 Message file names. 
*/ public static final java.util.List<java.io.File> constructHL7V3MessageFileNames(File userSpecifiedFile, int numberOfMessages, String extension, boolean extensionIncludesDelimiter) { java.util.List<File> resultList = new ArrayList<File>(); if(userSpecifiedFile==null) { Log.logWarning(FileUtil.class, "constructHL7V3MessageFileNames(): user specified file is null."); return resultList; } String extensionLocal = getFileExtension(userSpecifiedFile, extensionIncludesDelimiter); String absoluteFileName = userSpecifiedFile.getAbsolutePath(); if(GeneralUtilities.areEqual(extensionLocal, extension)) {//already contains the given extension, need to strip off so as to append absoluteFileName = getFileNameWithoutExtension(absoluteFileName); } for(int i=1; i<=numberOfMessages; i++) { String fileName = absoluteFileName + "_" + i; File file = new File(fileName); file = appendFileNameWithGivenExtension(file, extension, extensionIncludesDelimiter); resultList.add(file); } return resultList; } /** * Return the absolute file name without the trailing file extension; return absoluteFileName itself if it does not contain any extension. * @param absoluteFileName * @return the absolute file name without the trailing file extension; return absoluteFileName itself if it does not contain any extension. */ private static final String getFileNameWithoutExtension(String absoluteFileName) { if (absoluteFileName.startsWith(Config.CAADAPTER_HOME_DIR_TAG)) absoluteFileName = absoluteFileName.replace(Config.CAADAPTER_HOME_DIR_TAG, getWorkingDirPath()); if(absoluteFileName==null) { return absoluteFileName; } int extIndex = absoluteFileName.lastIndexOf("."); if(extIndex!=-1) { absoluteFileName = absoluteFileName.substring(0, extIndex); } return absoluteFileName; } /** * This function will return the file with the given extension. If it already contains, return immediately. * @param file * @param extension * @param extensionIncludesDelimiter * @return the File object contains the right file name with the given extension. */ public static final File appendFileNameWithGivenExtension(File file, String extension, boolean extensionIncludesDelimiter) { String extensionLocal = getFileExtension(file, extensionIncludesDelimiter); if(GeneralUtilities.areEqual(extensionLocal, extension)) {//already contains the given extension, return return file; } else { String newFileName = file.getAbsolutePath(); if(extensionIncludesDelimiter) { newFileName += extension; } else { newFileName += "." + extension; } File resultFile = new File(newFileName); return resultFile; } } /** * Create a temporary file which includes the received string parameter. * * @param string parameter which would like to be saved into this temporary file. * @return the temporary file name. this file will be automatically deleted when system exit in according to File.deleteOnExit(). * @throws IOException when saving is failed. */ public static String saveStringIntoTemporaryFile(String string) throws IOException // inserted by umkis 08/10/2006 { String tempFileName = getTemporaryFileName(); saveStringIntoTemporaryFile(tempFileName, string); return tempFileName; } /** * This function will dawnload data from a InputStream and save them into a file. * @param addr url address * @return the File object contains the right file name with the given extension. 
* @throws IOException Any Exception will be passed into IOException */ public static String downloadFromURLtoTempFile(String addr) throws IOException { if ((addr == null)||(addr.trim().equals(""))) throw new IOException("Null address."); URL ur = null; InputStream is = null; //FileOutputStream fos = null; addr = addr.trim(); String tempFile = ""; int idx = -1; for(int i=0;i<addr.length();i++) { String achar = addr.substring(i, i+1); if (achar.equals(".")) idx = i; } if (idx <= 0) tempFile = getTemporaryFileName(); else tempFile = getTemporaryFileName(addr.substring(idx)); try { ur = new URL(addr); } catch(MalformedURLException ue) { throw new IOException("Invalid URL : " + ue.getMessage()); } URLConnection uc = ur.openConnection(); try { uc.connect(); } catch(SocketTimeoutException se) { throw new IOException("SocketTimeoutException : " + se.getMessage()); } return downloadFromInputStreamToFile(uc.getInputStream(), tempFile); } /** * This function will dawnload data from a InputStream and save them into a file. * @param is InputStream * @param fileName file name - this file will be deleted when system exit. * @return the File object contains the right file name with the given extension. * @throws IOException Any Exception will be passed into IOException */ public static String downloadFromInputStreamToFile(InputStream is, String fileName) throws IOException { return downloadFromInputStreamToFile(is, fileName, true); } /** * This function will dawnload data from a InputStream and save them into a file. * @param is InputStream * @param fileName file name * @param deleteOnExit if true this file will be deleted when system exit. * @return the File object contains the right file name with the given extension. * @throws IOException Any Exception will be passed into IOException */ public static String downloadFromInputStreamToFile(InputStream is, String fileName, boolean deleteOnExit) throws IOException { if (is == null) throw new IOException("Null InputStream "); if ((fileName == null)||(fileName.trim().equals(""))) throw new IOException("Null File Name."); DataInputStream dis = new DataInputStream(is); FileOutputStream fos = null; DataOutputStream dos = null; byte bt = 0; boolean started = false; while(true) { try { bt = dis.readByte(); } catch(IOException ie) { break; } catch(NullPointerException ie) { break; } if (!started) { try { fos = new FileOutputStream(fileName); } catch(FileNotFoundException fe) { throw new IOException("FileNotFoundException : " + fe.getMessage()); } catch(SecurityException se) { throw new IOException("SecurityException : " + se.getMessage()); } dos = new DataOutputStream(fos); started = true; } dos.writeByte(bt); } if (fos == null) throw new IOException("This InputStream object is empty."); dis.close(); dos.close(); is.close(); fos.close(); if (deleteOnExit) setFileDeleteOnExit(fileName); return fileName; } /** * This makes parametered file delete when system exit. 
* @param fileName file name * @return true or false */ public static boolean setFileDeleteOnExit(String fileName) { if ((fileName == null)||(fileName.trim().equals(""))) return false; File file = new File(fileName); if (!file.exists()) return false; file.deleteOnExit(); return true; } public static String getPropertyFromComponentPropertyFile(String key) { return getPropertyFromComponentPropertyFile(null, key); } public static String getPropertyFromComponentPropertyFile(String propertyFile, String key) { if (key == null) return null; key = key.trim(); if (key.equals("")) return null; String result = ""; String path = ""; String name = ""; File file = null; if ((propertyFile == null)||(propertyFile.trim().equals(""))) { path = CaadapterUtil.getPathOfComponentPropertyFile(); name = CaadapterUtil.getNameOfComponentPropertyFile(); } else { file = new File(propertyFile); if ((!file.exists())||(!file.isFile())) return null; path = propertyFile; name = propertyFile; } InputStream fi = null; //appConfig=new HashMap(); //load caadapter component types to run Properties properties=new Properties(); try { File srcFile=new File(path); if ((srcFile.exists())&&(srcFile.isFile())) { //System.out.println("PP1 : " + path); fi =new FileInputStream(srcFile); } else { //System.out.println("PP2 : " + name); fi = CaadapterUtil.class.getClassLoader().getResource(name).openStream(); } properties.load(fi); if (properties == null) return null; //read the value for each component and add it into the ActivatedList Enumeration propKeys=properties.keys(); while (propKeys.hasMoreElements()) { String onePropKey=(String)propKeys.nextElement(); String onePropValue=(String)properties.getProperty(onePropKey); //System.out.println("Component Properties ("+path+") : " + onePropKey + " => " + onePropValue); if (onePropKey == null) continue; onePropKey = onePropKey.trim(); if (onePropKey.equals("")) continue; if (onePropKey.equals(key)) { result = onePropValue; //System.out.println(" *** This is the Key!!"); } } } catch (Exception ex) { return null; } finally { if (fi != null) try { fi.close(); } catch (IOException ignore) {} } if (result == null) return null; result = result.trim(); if (result.equals("")) return null; return result; } /** * Retrieve a resource URL: work for both standealone and Webstart deployment * @param rscName * @return URL */ public static URL retrieveResourceURL(String rscName) { URL rtnURL=null; System.out.println("FileUtil.retrieveResourceURL()..resourceName:"+rscName); rtnURL=Thread.currentThread().getClass().getResource("/"+rscName); System.out.println("FileUtil.retrieveResourceURL()..Thread.currentThread().getClass().getResource..standalone URL:/"+rscName+"="+rtnURL); if (rtnURL==null) { rtnURL=Thread.currentThread().getClass().getResource(rscName); System.out.println("FileUtil.retrieveResourceURL()..Thread.currentThread().getClass().getResource..standalone URL:"+rscName+"="+rtnURL); } //load resource for webstart deployment if (rtnURL==null) { rtnURL=FileUtil.class.getClassLoader().getResource(rscName); System.out.println("FileUtil.retrieveResourceURL()..FileUtil.class.getClassLoader().getResource..webstart URL:"+rscName+"="+rtnURL); if (rtnURL==null) { rtnURL=FileUtil.class.getClassLoader().getResource("/"+rscName); System.out.println("FileUtil.retrieveResourceURL()..FileUtil.class.getClassLoader().getResource..webstart URL:/"+rscName+"="+rtnURL); } } return rtnURL; } } /** * $Log: not supported by cvs2svn $ * Revision 1.31 2009/04/21 16:55:48 altturbo * update downloadFromURLtoTempFile() * * 
Revision 1.30 2009/04/17 14:24:20 wangeug * clean code:provide meaningful printout messages * * Revision 1.29 2009/04/02 06:45:30 altturbo * move getV3XsdFilePath() out to SchemaDirUtil.java * * Revision 1.28 2009/04/02 04:16:57 altturbo * modify getV3XsdFilePath() * * Revision 1.27 2009/04/02 04:10:23 altturbo * modify getV3XsdFilePath() * * Revision 1.26 2009/03/12 01:43:18 umkis * update filenameLocate() * * Revision 1.25 2009/03/10 01:28:32 umkis * minor change * * Revision 1.24 2009/03/09 20:21:49 umkis * minor change * * Revision 1.23 2009/03/09 18:10:31 umkis * add searchPropertyAsFilePath() and searchProperty() * * Revision 1.22 2009/03/09 18:02:29 umkis * add searchPropertyAsFilePath() and searchProperty() * * Revision 1.21 2009/02/25 15:56:25 wangeug * enable webstart * * Revision 1.20 2009/02/18 02:27:50 umkis * update filenameLocate() * * Revision 1.19 2008/12/12 22:01:30 umkis * add getV3XsdFilePath() and getPropertyFromComponentPropertyFile(String key) * * Revision 1.18 2008/10/21 21:07:50 umkis * update ODI to 2008 NE * * Revision 1.17 2008/06/09 19:53:50 phadkes * New license text replaced for all .java files. * * Revision 1.16 2008/05/30 01:00:40 umkis * update getV2ResourceMetaDataLoader() * * Revision 1.15 2008/05/29 00:30:56 umkis * add getV2ResourceMetaDataLoader() * * Revision 1.14 2008/05/22 15:59:47 umkis * add getV2ResourceMetaDataLoader(String) * * Revision 1.13 2008/05/22 15:33:42 umkis * add getV2ResourceMetaDataLoader() * * Revision 1.12 2008/04/01 21:06:46 umkis * minor change * * Revision 1.11 2007/11/16 17:17:34 wangeug * update SDTM module * * Revision 1.10 2007/09/24 20:05:28 umkis * Add v2 Meta data collector * * Revision 1.9 2007/09/20 22:41:01 umkis * no message * * Revision 1.8 2007/08/28 14:24:04 wangeug * clean code * * Revision 1.7 2007/08/28 13:58:51 wangeug * remove schemas folder from caAdapter.jar and set it under root directory: xxxx.xsd use relative path as "include" * * Revision 1.6 2007/08/09 01:56:52 umkis * add a feature that v2Meta directory creating when search the directory * * Revision 1.5 2007/08/08 23:05:48 umkis * update getV2DataDirPath() * * Revision 1.4 2007/07/14 20:16:02 umkis * add 'downloadFromInputStreamToFile()' * * Revision 1.3 2007/07/12 17:30:06 umkis * add 'getComponentsDirPath()' and directory paths of the componts. * * Revision 1.2 2007/07/09 15:39:58 umkis * Update for csv cardinality and test instance generating. * * Revision 1.1 2007/04/03 16:02:37 wangeug * initial loading of common module * * Revision 1.49 2006/12/28 20:50:36 umkis * saveValue() and readValue() in FunctionConstant * * Revision 1.48 2006/11/02 18:32:20 umkis * Some codes of the method 'downloadFromURLtoTempFile' was changed. * * Revision 1.47 2006/11/01 19:02:14 umkis * The method 'downloadFromURLtoTempFile' was added. * * Revision 1.46 2006/10/11 21:00:53 umkis * Change {caAdapter_Home} tag to absolute path name of the home directory. * * Revision 1.45 2006/09/19 18:06:11 umkis * add getV2DataDirPath() * * Revision 1.44 2006/09/18 18:32:46 umkis * change deleteTemporaryFiles() for checking that it was 10 minutes after generating the temp file. * * Revision 1.43 2006/08/10 20:32:02 umkis * isTemporaryFileName(String) was added. * * Revision 1.42 2006/08/10 20:11:29 umkis * Config.TEMPORARY_FILE_PREFIX is used for generate a temporary file name. * * Revision 1.41 2006/08/10 19:46:59 umkis * saveStringIntoTemporaryFile(String) and getRandomNumber(int) was added. 
* * Revision 1.40 2006/08/10 17:39:44 umkis * For more precisely distinguishing other temporary file, four dgits random number will be attached to filename. * * Revision 1.39 2006/08/09 22:53:48 umkis * getTemporayFileName(), deleteTemporaryFiles() and getETCDir() were added. * * Revision 1.38 2006/08/09 22:48:17 umkis * Just before closing mainFrame, all temporary files is deleted. * * Revision 1.37 2006/08/02 18:44:25 jiangsc * License Update * * Revision 1.36 2006/05/04 19:41:58 chene * Add 150003 test instance * * Revision 1.35 2006/01/03 19:16:53 jiangsc * License Update * * Revision 1.34 2006/01/03 18:56:26 jiangsc * License Update * * Revision 1.33 2005/12/30 22:23:30 chene * Update JavaDoc * * Revision 1.32 2005/12/29 23:06:16 jiangsc * Changed to latest project name. * * Revision 1.31 2005/12/29 15:39:06 chene * Optimize imports * * Revision 1.30 2005/12/14 21:29:40 giordanm * no message * * Revision 1.29 2005/11/02 22:36:06 chene * change "\\" to "/" * * Revision 1.28 2005/10/26 21:30:10 chene * Clean up e.printStackTrace() * * Revision 1.27 2005/10/26 17:33:13 giordanm * bug #129 * * Revision 1.26 2005/10/25 20:20:25 chene * Support Schema Location * * Revision 1.25 2005/10/20 18:26:58 jiangsc * Updated to point to the UI default example directory. * * Revision 1.24 2005/10/19 21:49:21 chene * creat new directory workingspace, move example directory to there * * Revision 1.23 2005/08/22 17:32:45 giordanm * change the file attribute within BaseComponent from a String to a File, this checkin also contains some refactor work to the FileUtil. * * Revision 1.22 2005/08/11 22:10:38 jiangsc * Open/Save File Dialog consolidation. * * Revision 1.21 2005/08/09 22:53:04 jiangsc * Save Point * * Revision 1.20 2005/08/08 18:05:50 giordanm * a bunch of checkins that changes hard coded paths to relative paths. * * Revision 1.19 2005/07/19 22:28:03 jiangsc * 1) Renamed FunctionalBox to FunctionBox to be consistent; * 2) Added SwingWorker to OpenObjectToDbMapAction; * 3) Save Point for Function Change. * * Revision 1.18 2005/06/24 20:58:08 jiangsc * Save Point * * Revision 1.17 2005/06/21 23:03:02 jiangsc * Put in new CSVPanel Implementation. * * Revision 1.16 2005/06/08 23:02:02 jiangsc * Implemented New UI. * * Revision 1.15 2005/06/02 22:12:02 chene * no message * * Revision 1.14 2005/05/17 20:07:16 chene * Updated CVS tag test * * Revision 1.13 2005/05/17 20:05:38 chene * Updated CVS tag test * * Revision 1.12 2005/05/17 17:33:07 giordanm * remove the <br> in the javadoc heading * * Revision 1.11 2005/05/17 17:15:45 giordanm * another minor change to the CVS javadoc comments. * * Revision 1.10 2005/05/17 17:01:20 giordanm * Playing around with CVS keywords / javadoc generation. * */
caadapter/components/common/src/gov/nih/nci/caadapter/common/util/FileUtil.java
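A minimal usage sketch for two of the FileUtil helpers defined in the file above (getRandomNumber and appendFileNameWithGivenExtension). The calling class, the output path, and the "xml" extension are illustrative assumptions, and FileUtil's own package (gov.nih.nci.caadapter.common.util) is assumed to be on the classpath; this is not part of the caAdapter sources.

// Hypothetical caller -- illustrative only.
import gov.nih.nci.caadapter.common.util.FileUtil;
import java.io.File;

public class FileUtilUsageSketch
{
    public static void main(String[] args)
    {
        // Four-digit random number, as used when building temporary file name suffixes.
        int suffix = FileUtil.getRandomNumber(4);

        // Append ".xml" only if the name does not already carry that extension
        // (false = the extension string is passed without the '.' delimiter).
        File target = FileUtil.appendFileNameWithGivenExtension(new File("out/message_" + suffix), "xml", false);

        System.out.println(target.getAbsolutePath());
    }
}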
/** * <!-- LICENSE_TEXT_START --> The contents of this file are subject to the caAdapter Software License (the "License"). You may obtain a copy of the License at the following location: [caAdapter Home Directory]\docs\caAdapter_license.txt, or at: http://ncicb.nci.nih.gov/infrastructure/cacore_overview/caadapter/indexContent/docs/caAdapter_License * <!-- LICENSE_TEXT_END --> */ package gov.nih.nci.caadapter.common.util; import edu.knu.medinfo.hl7.v2tree.HL7MessageTreeException; import edu.knu.medinfo.hl7.v2tree.MetaDataLoader; import gov.nih.nci.caadapter.common.Log; import gov.nih.nci.caadapter.common.function.DateFunction; import gov.nih.nci.caadapter.common.function.FunctionException; //import gov.nih.nci.caadapter.hl7.mif.NormativeVersionUtil; import javax.swing.*; import java.awt.*; import java.io.*; import java.net.MalformedURLException; import java.net.SocketTimeoutException; import java.net.URL; import java.net.URLConnection; import java.util.*; import java.util.List; import java.util.logging.FileHandler; /** * File related utility class * * @author OWNER: Matthew Giordano * @author LAST UPDATE $Author: altturbo $ * @version $Revision: 1.31 $ */ public class FileUtil { private static final String OUTPUT_DIR_NAME = "out"; private static File OUTPUT_DIR = null; private static File ODI_FILE = null; private static MetaDataLoader v2Loader = null; /** * Create the output directory if it doesn't exist. */ private static void setupOutputDir() { OUTPUT_DIR = new File(OUTPUT_DIR_NAME); if (!OUTPUT_DIR.isDirectory()) { OUTPUT_DIR.mkdir(); } } public static String getAssociatedFileAbsolutePath(String holderFile, String associatedFile) { if(associatedFile.indexOf(File.separator)>-1) return associatedFile; File holder=new File(holderFile); File associted=new File(associatedFile); if (!holder.exists()) return associatedFile; if (holder.isDirectory()) return associatedFile; String holderParent=holder.getParent(); String rntPath=holderParent+File.separator+associatedFile; return rntPath; } /** * Compare the name/absolute path of a holder file with its associated file * return the name of the assoicated file without parentPath if they have the same * parent, otherwise return the input value of the associated file * @param holderFile the name/absolutePath of a holder file * @param associatedFile the name/absolutePath of an associated file * @return name of the associated file to be used as reference from the holder file */ public static String getAssociatedFileRelativePath(String holderFile, String associatedFile) { File holder=new File(holderFile); File associted=new File(associatedFile); if (!holder.exists()) return associatedFile; if (holder.isDirectory()) return associatedFile; if (!associted.exists()) return associatedFile; if (associted.isDirectory()) return associatedFile; String holderParent=holder.getParent(); String associateParent=associted.getParent(); if (!holderParent.equals(associateParent)) return associatedFile; return associted.getName(); } /** * Create the output directory if necessary and return a reference to it. 
* * @return The output directory */ public static File getOutputDir() { FileUtil.setupOutputDir(); return OUTPUT_DIR; } public static String getWorkingDirPath() { File f = new File(""); return f.getAbsolutePath(); } public static String getComponentsDirPath() { return getWorkingDirPath() + File.separator + "components"; } public static String getCommonDirPath() { return getComponentsDirPath() + File.separator + "common"; } public static String getDataViewerDirPath() { return getComponentsDirPath() + File.separator + "dataviewer"; } public static String getHL7TransformationDirPath() { return getComponentsDirPath() + File.separator + "hl7Transformation"; } public static String getModelMappingDirPath() { return getComponentsDirPath() + File.separator + "modelMapping"; } public static String getSDTMTransformationDirPath() { return getComponentsDirPath() + File.separator + "sdtmTransformation"; } public static String getUserInterfaceDirPath() { return getComponentsDirPath() + File.separator + "userInterface"; } public static String getWebServicesDirPath() { return getComponentsDirPath() + File.separator + "webservices"; } public static String getETCDirPath() // inserted bt umkis 08/09/2006 { File f = new File("./etc"); return f.getAbsolutePath(); } public static String getExamplesDirPath() { File f = new File("./workingspace/examples"); return f.getAbsolutePath(); } public static String getV2DataDirPath() { File f = new File(getWorkingDirPath() + File.separator + "data" + File.separator + "v2Meta"); if ((!f.exists())||(!f.isDirectory())) f.mkdirs(); return f.getAbsolutePath(); //return getV2DataDirPath(null); } public static String getV2DataDirPath(Component parent) { File f = new File(getWorkingDirPath() + File.separator + "data" + File.separator + "v2Meta"); if ((!f.exists())||(!f.isDirectory())) { if (parent == null) return null; String display = "HL7 v2 meta Directory isn't created yet.\nPress 'Yes' button if you want to create directory.\nIt may takes some minutes."; //JOptionPane.showMessageDialog(parent, "Making V2 Meta Directory", display, JOptionPane.WARNING_MESSAGE); //System.out.println("CCCCV : " + display); int res = JOptionPane.showConfirmDialog(parent, display, "Create v2 Meta Directory", JOptionPane.YES_NO_OPTION, JOptionPane.INFORMATION_MESSAGE); if (res != JOptionPane.YES_OPTION) return ""; ClassLoaderUtil loaderUtil = null; try { loaderUtil = new ClassLoaderUtil("v2Meta"); } catch(IOException ie) { JOptionPane.showMessageDialog(parent, ie.getMessage() + ".\n Check the resourceV2.zip file in the library.", "Creating V2 Meta Directory failure", JOptionPane.WARNING_MESSAGE); //System.err.println("Make V2 Meta Directory : " + ie.getMessage()); return null; } for(int i=0;i<loaderUtil.getSizeOfFiles();i++) { String path = loaderUtil.getPath(i); if (!path.trim().toLowerCase().endsWith(".dat")) continue; path = path.replace("/", File.separator); path = getWorkingDirPath() + File.separator + "data" + File.separator + path; int index = -1; for (int j=path.length();j>0;j--) { String achar = path.substring(j-1, j); if (achar.equals(File.separator)) { index = j; break; } } //System.out.println("V2 Meta : " + path); if (index <= 0) { JOptionPane.showMessageDialog(parent, "V2 Meta file is invalid : " + path, "Creating V2 Meta Directory failure", JOptionPane.WARNING_MESSAGE); //System.err.println("V2 Meta file is invalid : " + path); return null; } File dir = new File(path.substring(0,(index-1))); if ((!dir.exists())||(!dir.isDirectory())) { if (!dir.mkdirs()) { 
JOptionPane.showMessageDialog(parent, "V2 Meta directory making failure : " + dir, "Creating V2 Meta Directory failure", JOptionPane.WARNING_MESSAGE); //System.err.println("V2 Meta directory making failure : " + dir); return null; } } File datFile = new File(loaderUtil.getFileName(i)); if ((!datFile.exists())||(!datFile.isFile())) { JOptionPane.showMessageDialog(parent, "Not Found This V2 Meta temporary file : " + path, "Creating V2 Meta Directory failure", JOptionPane.WARNING_MESSAGE); continue; } if (!datFile.renameTo(new File(path))) System.err.println("V2 Meta rename failure : " + path); } } return f.getAbsolutePath(); } public static String searchPropertyAsFilePath(String key) { return searchProperty(null, key, false, true); } public static String searchProperty(String key) { return searchProperty(null, key, false, false); } public static String searchProperty(String key, boolean useProperty) { return searchProperty(null, key, useProperty, false); } private static String searchProperty(File dir, String key, boolean useProperty, boolean isFilePath) { File sDir = null; if (dir == null) sDir = new File(getWorkingDirPath()); else { if (!dir.isDirectory()) return null; sDir = dir; } String res = null; File[] files = sDir.listFiles(); for(File file:files) { String fName = file.getName(); if (file.isFile()) { if ((fName.toLowerCase().endsWith(".properties"))|| (fName.toLowerCase().endsWith(".property"))) {} else continue; if (useProperty) { res = getPropertyFromComponentPropertyFile(file.getAbsolutePath(), key); if (res != null) { if (isFilePath) { String path = checkValidFilePath(res); if (path != null) return path; } else return res; } continue; } List<String> list = null; try { list = readFileIntoList(file.getAbsolutePath()); } catch(IOException ie) { continue; } for (String line:list) { line = line.trim(); if (line.startsWith("#")) continue; if (line.startsWith("!")) continue; int idx = line.indexOf("="); if (idx < 0) idx = line.indexOf(":"); if (idx <= 0) continue; String keyS = line.substring(0, idx).trim(); if (!keyS.equals(key.trim())) continue; res = line.substring(idx+1).trim(); if (isFilePath) { String path = checkValidFilePath(res); if (path != null) return path; } else return res; } } else if (file.isDirectory()) { if ((fName.equalsIgnoreCase("conf"))|| (fName.equalsIgnoreCase("etc"))) res = searchProperty(file, key, useProperty, isFilePath); if (res != null) return res; } } res = null; if (sDir.getName().equalsIgnoreCase("dist")) res = searchProperty(sDir.getParentFile(), key, useProperty, isFilePath); if (res != null) return res; return null; } public static String searchFile(String fileName) { return searchFile(null, fileName); } public static String searchFile(File dir, String fileName) { if (fileName == null) return null; fileName = fileName.trim(); if (fileName.equals("")) return null; if (fileName.endsWith(File.separator)) fileName = fileName.substring(0, fileName.length()-File.separator.length()); if (fileName.endsWith("/")) fileName = fileName.substring(0, fileName.length()-1); while(true) { int idx = fileName.indexOf(File.separator); int len = 0; if (idx >= 0) len = File.separator.length(); else { idx = fileName.indexOf("/"); if (idx >= 0) len = 1; } if (idx < 0) break; fileName = fileName.substring(idx + len); } if (dir == null) { File wDir = new File(getWorkingDirPath()); if (wDir.getName().equals("dist")) dir = wDir.getParentFile(); else dir = wDir; } if ((!dir.exists())||(!dir.isDirectory())) return null; File[] files = dir.listFiles(); for(File file:files) { if 
(file.getName().equals(fileName)) return file.getAbsolutePath(); if (file.isDirectory()) { String res = searchFile(file, fileName); if (res != null) return res; } } return null; } public static String checkValidFilePath(String data) { if (data == null) return null; data = data.trim(); if (data.equals("")) return null; while(true) { int idx = data.indexOf("\\\\"); if (idx < 0) break; data = data.substring(0, idx) + data.substring(idx + 1); } File file = new File(data); if (file.exists()) return file.getAbsolutePath(); return null; } /* public static String getV3XsdFilePath() { String schemaPath= NormativeVersionUtil.getCurrentMIFIndex().getSchemaPath(); File f = new File(schemaPath); if (!f.exists()) f = new File("../" + schemaPath); if (!f.exists()) { System.err.println("Not Found V3 XSD Directory..."); return null; } if (f.isDirectory()) return f.getAbsolutePath(); String parent = f.getParent(); if (!parent.endsWith(File.separator)) parent = parent + File.separator; File sdir = new File(parent + "schemas"); if ((sdir.exists())&&(sdir.isDirectory())) return sdir.getAbsolutePath(); System.err.println("Not Found V3 XSD Directory..."); return null; } */ public static MetaDataLoader getV2ResourceMetaDataLoader() { return getV2ResourceMetaDataLoader(null); } public static MetaDataLoader getV2ResourceMetaDataLoader(String resourceFile) { if (resourceFile == null) resourceFile = ""; else resourceFile = resourceFile.trim(); //if (v2Loader == null) System.out.println("CCC v3 meta loader (1) : " + resourceFile); //else System.out.println("CCC v3 meta loader (2) : " + v2Loader.getPath() + ", " + resourceFile); if (!resourceFile.equals("")) { MetaDataLoader loader = null; try { loader = new MetaDataLoader(resourceFile); } catch(HL7MessageTreeException he) { System.out.println("HL7MessageTreeException : " + he.getMessage()); return null; } v2Loader = loader; return loader; } if (v2Loader == null) { String name = "v2Meta"; Enumeration<URL> fileURLs = null; try { fileURLs= ClassLoader.getSystemResources(name); } catch(IOException ie) { System.out.println("IOException #1 : " + ie.getMessage()); } if (fileURLs == null) { System.out.println("ClassLoader Result : " + name + " : Not Found"); return null; } //System.out.println("Number of Result : " + fileURLs.toString()); boolean found = false; while(fileURLs.hasMoreElements()) { URL fileURL = fileURLs.nextElement(); String url = fileURL.toString(); if ((url.toLowerCase().startsWith("jar:"))||(url.toLowerCase().startsWith("zip:"))) { int idx = url.indexOf("!"); if (idx < 0) { System.err.println("Invalid jar file url : " + url); continue; } String jarFileName = url.substring(4, idx); try { v2Loader = new MetaDataLoader(jarFileName); found = true; } catch(HL7MessageTreeException he) { continue; } } if ((found)&&(v2Loader != null)) return v2Loader; } v2Loader = null; return null; } else return v2Loader; } /** * Copied from javaSig code * SInce 2005 Normative Edition, the name convention is changed. It has to following * HL7 v3 artifact naming convention: {UUDD_AAnnnnnnUVnn}. * - UU = Sub-Section code * - DD = Domain code * - AA = Artifact or Document code. * (Message Type will be MT) - nnnnnn = Six digit zero-filled number * - UV = Universal * - nn = ballot version * * Since the ballot version is different, starting from 01 to unknown, this function find related file * by guessing the version number * * THIS METHOD IS NOT USED BY JAVASIG CODE ANY MORE. IF IT WERE, IT WOULD HAVE TO BE * CONVERTED TO NOT USER Files BUT Resources INSTEAD. 
* TestParseAndBuild uses it, but that's O.K. Just those tests will eventually fail. * * * @param messageType * @param fileExtension * @return File Name */ public static String searchMessageTypeSchemaFileName(String messageType, String fileExtension) throws FileNotFoundException { String schemaFileNamePath =""; for (int i = -1; i < 100; i++) { String pad = ""; if (i < 0) pad = ""; else pad = i < 10 ? "UV0" + i : "UV" + String.valueOf(i); String schemaFileName = Config.SCHEMA_LOCATION+messageType+pad + "." + fileExtension; schemaFileNamePath=FileUtil.getWorkingDirPath() + File.separator + schemaFileName; File file = new File(schemaFileNamePath); if ((file.exists())&&(file.isFile())) return schemaFileName; // return file.getAbsolutePath(); URL fileURL= ClassLoader.getSystemResource(schemaFileName); if (fileURL!=null) return schemaFileName; // return fileURL.getFile(); } //Throw exception since file is not found.... throw new FileNotFoundException("File Directory:" + Config.SCHEMA_LOCATION + " Message Type:" + messageType + " File Extenstion:" + fileExtension + ", "+schemaFileNamePath); } /** * Return a convenient UI Working Directory, which may or may not be the same as the value from getWorkingDirPath(). * @return a convenient UI Working Directory, which may or may not be the same as the value from getWorkingDirPath(). */ public static String getUIWorkingDirectoryPath() { File f = new File("./workingspace"); if ((!f.exists())||(!f.isDirectory())) { f.mkdirs(); } return f.getAbsolutePath(); } /** * Generat a Temporary File Name at workingspace directory. * @return a Temporary File Name. */ public static String getTemporaryFileName() // inserted by umkis 08/09/2006 { return getTemporaryFileName(Config.TEMPORARY_FILE_EXTENSION); } /** * Generat a Temporary File Name at workingspace directory. * @param extension the extention of generated temp file * @return a Temporary File Name. */ public static String getTemporaryFileName(String extension) // inserted by umkis 08/09/2006 { DateFunction dateFunction = new DateFunction(); String dateFormat = dateFunction.getDefaultDateFormatString(); if (!dateFormat.endsWith("SSS")) dateFormat = dateFormat + "SSS"; try { return getUIWorkingDirectoryPath() + File.separator + Config.TEMPORARY_FILE_PREFIX + (new DateFunction()).getCurrentTime(dateFormat) + "_" + getRandomNumber(4) + extension; } catch(FunctionException fe) { return getUIWorkingDirectoryPath() + File.separator + Config.TEMPORARY_FILE_PREFIX + (new DateFunction()).getCurrentTime() + "_" + getRandomNumber(4) + extension; } } /** * Check the parameter whether a temporary file name or not. * * @param fileName * @return true if a temporary file name, else is false. */ public static boolean isTemporaryFileName(String fileName) // inserted by umkis 08/10/2006 { if (fileName.length() > 1024) return false; if ( //(fileName.endsWith(Config.TEMPORARY_FILE_EXTENSION)) && (fileName.indexOf(Config.TEMPORARY_FILE_PREFIX) >= 0) ) return true; else return false; } /** * Create a temporary file which includes the received string parameter. * * @param tempFileName file name of this temporary file. * @param string parameter which would like to be saved into this temporary file. * @throws IOException when saving is failed. 
*/ public static void saveStringIntoTemporaryFile(String tempFileName, String string) throws IOException // inserted by umkis 12/26/2006 { FileWriter fw = null; File file = null; try { fw = new FileWriter(tempFileName); fw.write(string); fw.close(); file = new File(tempFileName); } catch(Exception ie) { throw new IOException("File Writing Error(" + tempFileName + ") : " + ie.getMessage() + ", value : " + string); } file.deleteOnExit(); } public static List<String> readFileIntoList(String fileName) throws IOException { List<String> list = new ArrayList<String>(); FileReader fr = null; try { fr = new FileReader(fileName); } catch(FileNotFoundException fe) { throw new IOException("FileNotFoundException in FileUtil.readFileIntoList() : " + fileName); } BufferedReader br = new BufferedReader(fr); String readLineOfFile = ""; try { while((readLineOfFile=br.readLine())!=null) list.add(readLineOfFile); } catch(IOException ie) { throw new IOException("File reading Error in FileUtil.readFileIntoList() : " + fileName); } try { fr.close(); br.close(); } catch(IOException ie) { throw new IOException("File Closing Error in FileUtil.readFileIntoList() : " + fileName); } return list; } public static String readFileIntoStringAllowException(String fileName) throws IOException { List<String> list = null; list = readFileIntoList(fileName); String output = ""; for(int i=0;i<list.size();i++) output = output + list.get(i) + "\r\n"; return output.trim(); } public static String readFileIntoString(String fileName) { String out = ""; try { out = readFileIntoStringAllowException(fileName); } catch(IOException ie) { return null; } return out; } public static String findODIWithDomainName(String str) throws IOException { if ((str == null)||(str.trim().equals(""))) return ""; if (ODI_FILE == null) { ClassLoaderUtil loaderUtil = new ClassLoaderUtil("instanceGen/HL7_ODI.csv"); if (loaderUtil.getFileNames().size() == 0) throw new IOException("HL7_ODI.csv file class loading failure."); ODI_FILE = new File(loaderUtil.getFileNames().get(0)); ODI_FILE.deleteOnExit(); } FileReader fr = null; //String fileName = ODI_FILE_NAME; try { fr = new FileReader(ODI_FILE); } catch(FileNotFoundException fe) { throw new IOException("ODI File : FileNotFoundException in FileUtil.readFileIntoList() : " + ODI_FILE.getName()); } BufferedReader br = new BufferedReader(fr); String readLineOfFile = ""; String result = ""; try { while((readLineOfFile=br.readLine())!=null) { //System.out.println("CCCYYYY : " + readLineOfFile); if ((readLineOfFile.startsWith("1."))||(readLineOfFile.startsWith("2."))||(readLineOfFile.startsWith("3."))||(readLineOfFile.startsWith("4."))) {} else continue; StringTokenizer st = new StringTokenizer(readLineOfFile, ","); int n = 0; String odi = ""; String domainName = ""; while(st.hasMoreTokens()) { if (n == 0) odi = st.nextToken().trim(); if (n == 1) domainName = st.nextToken().trim(); if (n == 2) break; n++; } //System.out.println("CCCXX : " + str + ", " + domainName + ", " + odi); if (str.trim().equals(domainName)) { result = odi; break; } } } catch(IOException ie) { throw new IOException("ODI File reading Error in FileUtil.readFileIntoList() : " + ODI_FILE.getName()); } try { fr.close(); br.close(); } catch(IOException ie) { throw new IOException("ODI File Closing Error in FileUtil.readFileIntoList() : " + ODI_FILE.getName()); } return result; } /** * Create a random integer number with digit number which was given by the caller. * For example, when digit number is 5, return value can be 34562 or 98123. 
* @param digit number of generated random number * @return generated random number */ public static int getRandomNumber(int digit) // inserted by umkis 08/10/2006 { if (digit <= 0) return 0; int in = 1; int la = 0; int sa = 0; for(int x=0;x<digit;x++) { in = in * 10; } la = in; sa = la / 10; return getRandomNumber(sa, la); } /** * Create a random integer number between max and min number which was given by the caller. * @param min : number of generated minimum random number * @param max : number of generated maximum random number * @return generated random number */ public static int getRandomNumber(int min, int max) // inserted by umkis 06/13/2007 { if (min == max) return max; if (min > max) { int t = max; max = min; min = t; } Random rnd = new Random(); int in = 0; int in1 = 0; if (max <= 0) { int min1 = 0 - max; int max1 = 0 - min; while(true) { in = rnd.nextInt(); in1 = in % max1; if (in1 >= min1) break; } in1 = 0 - in1; } else { while(true) { in = rnd.nextInt(); in1 = in % max; if (in1 >= min) break; } } return in1; } /** * Delete a lck file from the output directory. A lck file is a temporary file that is * created by the logger. * * @param filename */ public static void deleteLckFile(String filename) { File lckFile = new File(FileUtil.getOutputDir().getAbsolutePath() + File.separator + filename + ".lck"); if (lckFile != null && lckFile.delete()) { // do nothing } else { // lck file couldn't be deleted. } } public static String outputFile(String filename, String data) throws IOException { String fileName = FileUtil.getOutputDir().getAbsolutePath() + File.separator + filename; FileWriter out = new FileWriter(fileName); out.write(data); out.flush(); out.close(); return fileName; } /** * Create a filehandler to a log file that is located in the output directory. * * @param fileName the log that you want to create * @return the filehandler * @throws IOException */ public static FileHandler getLogFileHandle(String fileName) throws IOException { return new FileHandler(FileUtil.getOutputDir().getAbsolutePath() + File.separator + fileName); } /** * Search for a file by searching the classpath * (calling ClassLoader.getSystemResource()). * * @param fileName Name of the file you are looking for. * @return the path to the file * @throws FileNotFoundException */ public static String fileLocateOnClasspath(String fileName) throws FileNotFoundException { if (fileName.startsWith(Config.CAADAPTER_HOME_DIR_TAG)) fileName = fileName.replace(Config.CAADAPTER_HOME_DIR_TAG, getWorkingDirPath()); File f = new File(fileName); if (f.exists()) { return f.getAbsolutePath(); } URL u = null; u = ClassLoader.getSystemResource(fileName); if (u == null) { throw new FileNotFoundException(fileName + " - make sure the file is on the classpath."); } else { return u.getFile(); } } public static File fileLocate(String directory, String fileName)throws FileNotFoundException{ return new File(FileUtil.filenameLocate(directory,fileName)); } /** * Search for a file at the specified location and if it's not * found there look on the classpath by calling filenameLocate(fileName). 
* * @param directory the directory to look first * @param fileName the name fo the file * @return the path to the file * @throws FileNotFoundException */ public static String filenameLocate(String directory, String fileName) throws FileNotFoundException { if ((fileName == null)||(fileName.trim().equals(""))) throw new FileNotFoundException("Null file name..."); else fileName = fileName.trim(); if (fileName.startsWith(Config.CAADAPTER_HOME_DIR_TAG)) fileName = fileName.replace(Config.CAADAPTER_HOME_DIR_TAG, getWorkingDirPath()); // check just the filename File f = new File(fileName); if (f.exists()) { return f.getAbsolutePath(); } if ((directory == null)||(directory.trim().equals(""))) throw new FileNotFoundException("Null Dirctory..."); else directory = directory.trim(); if (directory.startsWith(Config.CAADAPTER_HOME_DIR_TAG)) directory = directory.replace(Config.CAADAPTER_HOME_DIR_TAG, getWorkingDirPath()); if (!directory.endsWith(File.separator)) directory = directory + File.separator; // check directory + filename f = new File(directory + fileName); if (f.exists()) { return f.getAbsolutePath(); } String temp = fileName; if (fileName.endsWith(File.separator)) fileName = fileName.substring(0, fileName.length()-File.separator.length()); if (fileName.endsWith("/")) fileName = fileName.substring(0, fileName.length()-1); while(true) { int idx = fileName.indexOf(File.separator); int len = 0; if (idx >= 0) len = File.separator.length(); else { idx = fileName.indexOf("/"); if (idx >= 0) len = 1; } if (idx < 0) break; fileName = fileName.substring(idx + len); } f = new File(directory + fileName); if (f.exists()) { //System.out.println("DDD : " + temp +" ; "+ fileName +" ; "+ f.getAbsolutePath()); return f.getAbsolutePath(); } fileName = temp; String fileLocation = null; try { fileLocation = fileLocateOnClasspath(fileName); } catch (FileNotFoundException fnfe) { throw new FileNotFoundException(fileName + " - make sure " + "the location is correct OR the file is on the classpath"); } return fileLocation; } public static String getLogFilename(String fullFilename) { if (fullFilename.startsWith(Config.CAADAPTER_HOME_DIR_TAG)) fullFilename = fullFilename.replace(Config.CAADAPTER_HOME_DIR_TAG, getWorkingDirPath()); String justFileName = new File(fullFilename).getName(); return justFileName + ".log"; } /** * Return the extension part given file name. * For example, if the name of the file is "foo.bar", ".bar" will be returned * if includeDelimiter is true, or "bar" will be returned if includeDelimiter is false; * otherwise, if no extension is specified in the file name, empty string is * returned instead of null. * * @param file * @param includeDelimiter * @return the extension or an empty string if nothing is found */ public static final String getFileExtension(File file, boolean includeDelimiter) { String result = ""; if (file != null) { String absoluteName = file.getAbsolutePath(); if (absoluteName != null) { int delimIndex = absoluteName.lastIndexOf("."); if (delimIndex != -1) {//include the . delimiter if (!includeDelimiter) {//skip the . delimiter delimIndex++; } result = absoluteName.substring(delimIndex); } } } return result; } /** * Construct a list of V3 Message file names and return. * @param userSpecifiedFile * @param numberOfMessages * @param extension * @param extensionIncludesDelimiter * @return a list of V3 Message file names. 
*/ public static final java.util.List<java.io.File> constructHL7V3MessageFileNames(File userSpecifiedFile, int numberOfMessages, String extension, boolean extensionIncludesDelimiter) { java.util.List<File> resultList = new ArrayList<File>(); if(userSpecifiedFile==null) { Log.logWarning(FileUtil.class, "constructHL7V3MessageFileNames(): user specified file is null."); return resultList; } String extensionLocal = getFileExtension(userSpecifiedFile, extensionIncludesDelimiter); String absoluteFileName = userSpecifiedFile.getAbsolutePath(); if(GeneralUtilities.areEqual(extensionLocal, extension)) {//already contains the given extension, need to strip off so as to append absoluteFileName = getFileNameWithoutExtension(absoluteFileName); } for(int i=1; i<=numberOfMessages; i++) { String fileName = absoluteFileName + "_" + i; File file = new File(fileName); file = appendFileNameWithGivenExtension(file, extension, extensionIncludesDelimiter); resultList.add(file); } return resultList; } /** * Return the absolute file name without the trailing file extension; return absoluteFileName itself if it does not contain any extension. * @param absoluteFileName * @return the absolute file name without the trailing file extension; return absoluteFileName itself if it does not contain any extension. */ private static final String getFileNameWithoutExtension(String absoluteFileName) { if (absoluteFileName.startsWith(Config.CAADAPTER_HOME_DIR_TAG)) absoluteFileName = absoluteFileName.replace(Config.CAADAPTER_HOME_DIR_TAG, getWorkingDirPath()); if(absoluteFileName==null) { return absoluteFileName; } int extIndex = absoluteFileName.lastIndexOf("."); if(extIndex!=-1) { absoluteFileName = absoluteFileName.substring(0, extIndex); } return absoluteFileName; } /** * This function will return the file with the given extension. If it already contains, return immediately. * @param file * @param extension * @param extensionIncludesDelimiter * @return the File object contains the right file name with the given extension. */ public static final File appendFileNameWithGivenExtension(File file, String extension, boolean extensionIncludesDelimiter) { String extensionLocal = getFileExtension(file, extensionIncludesDelimiter); if(GeneralUtilities.areEqual(extensionLocal, extension)) {//already contains the given extension, return return file; } else { String newFileName = file.getAbsolutePath(); if(extensionIncludesDelimiter) { newFileName += extension; } else { newFileName += "." + extension; } File resultFile = new File(newFileName); return resultFile; } } /** * Create a temporary file which includes the received string parameter. * * @param string parameter which would like to be saved into this temporary file. * @return the temporary file name. this file will be automatically deleted when system exit in according to File.deleteOnExit(). * @throws IOException when saving is failed. */ public static String saveStringIntoTemporaryFile(String string) throws IOException // inserted by umkis 08/10/2006 { String tempFileName = getTemporaryFileName(); saveStringIntoTemporaryFile(tempFileName, string); return tempFileName; } /** * This function will dawnload data from a InputStream and save them into a file. * @param addr url address * @return the File object contains the right file name with the given extension. 
* @throws IOException Any Exception will be passed into IOException */ public static String downloadFromURLtoTempFile(String addr) throws IOException { if ((addr == null)||(addr.trim().equals(""))) throw new IOException("Null address."); URL ur = null; InputStream is = null; //FileOutputStream fos = null; addr = addr.trim(); String tempFile = ""; int idx = -1; for(int i=0;i<addr.length();i++) { String achar = addr.substring(i, i+1); if (achar.equals(".")) idx = i; } if (idx <= 0) tempFile = getTemporaryFileName(); else tempFile = getTemporaryFileName(addr.substring(idx)); try { ur = new URL(addr); } catch(MalformedURLException ue) { throw new IOException("Invalid URL : " + ue.getMessage()); } URLConnection uc = ur.openConnection(); try { uc.connect(); } catch(SocketTimeoutException se) { throw new IOException("SocketTimeoutException : " + se.getMessage()); } return downloadFromInputStreamToFile(uc.getInputStream(), tempFile); } /** * This function will dawnload data from a InputStream and save them into a file. * @param is InputStream * @param fileName file name - this file will be deleted when system exit. * @return the File object contains the right file name with the given extension. * @throws IOException Any Exception will be passed into IOException */ public static String downloadFromInputStreamToFile(InputStream is, String fileName) throws IOException { return downloadFromInputStreamToFile(is, fileName, true); } /** * This function will dawnload data from a InputStream and save them into a file. * @param is InputStream * @param fileName file name * @param deleteOnExit if true this file will be deleted when system exit. * @return the File object contains the right file name with the given extension. * @throws IOException Any Exception will be passed into IOException */ public static String downloadFromInputStreamToFile(InputStream is, String fileName, boolean deleteOnExit) throws IOException { if (is == null) throw new IOException("Null InputStream "); if ((fileName == null)||(fileName.trim().equals(""))) throw new IOException("Null File Name."); DataInputStream dis = new DataInputStream(is); FileOutputStream fos = null; DataOutputStream dos = null; byte bt = 0; boolean started = false; while(true) { try { bt = dis.readByte(); } catch(IOException ie) { break; } catch(NullPointerException ie) { break; } if (!started) { try { fos = new FileOutputStream(fileName); } catch(FileNotFoundException fe) { throw new IOException("FileNotFoundException : " + fe.getMessage()); } catch(SecurityException se) { throw new IOException("SecurityException : " + se.getMessage()); } dos = new DataOutputStream(fos); started = true; } dos.writeByte(bt); } if (fos == null) throw new IOException("This InputStream object is empty."); dis.close(); dos.close(); is.close(); fos.close(); if (deleteOnExit) setFileDeleteOnExit(fileName); return fileName; } /** * This makes parametered file delete when system exit. 
* @param fileName file name * @return true or false */ public static boolean setFileDeleteOnExit(String fileName) { if ((fileName == null)||(fileName.trim().equals(""))) return false; File file = new File(fileName); if (!file.exists()) return false; file.deleteOnExit(); return true; } public static String getPropertyFromComponentPropertyFile(String key) { return getPropertyFromComponentPropertyFile(null, key); } public static String getPropertyFromComponentPropertyFile(String propertyFile, String key) { if (key == null) return null; key = key.trim(); if (key.equals("")) return null; String result = ""; String path = ""; String name = ""; File file = null; if ((propertyFile == null)||(propertyFile.trim().equals(""))) { path = CaadapterUtil.getPathOfComponentPropertyFile(); name = CaadapterUtil.getNameOfComponentPropertyFile(); } else { file = new File(propertyFile); if ((!file.exists())||(!file.isFile())) return null; path = propertyFile; name = propertyFile; } InputStream fi = null; //appConfig=new HashMap(); //load caadapter component types to run Properties properties=new Properties(); try { File srcFile=new File(path); if ((srcFile.exists())&&(srcFile.isFile())) { //System.out.println("PP1 : " + path); fi =new FileInputStream(srcFile); } else { //System.out.println("PP2 : " + name); fi = CaadapterUtil.class.getClassLoader().getResource(name).openStream(); } properties.load(fi); if (properties == null) return null; //read the value for each component and add it into the ActivatedList Enumeration propKeys=properties.keys(); while (propKeys.hasMoreElements()) { String onePropKey=(String)propKeys.nextElement(); String onePropValue=(String)properties.getProperty(onePropKey); //System.out.println("Component Properties ("+path+") : " + onePropKey + " => " + onePropValue); if (onePropKey == null) continue; onePropKey = onePropKey.trim(); if (onePropKey.equals("")) continue; if (onePropKey.equals(key)) { result = onePropValue; //System.out.println(" *** This is the Key!!"); } } } catch (Exception ex) { return null; } finally { if (fi != null) try { fi.close(); } catch (IOException ignore) {} } if (result == null) return null; result = result.trim(); if (result.equals("")) return null; return result; } /** * Retrieve a resource URL: work for both standealone and Webstart deployment * @param rscName * @return */ public static URL retrieveResourceURL(String rscName) { URL rtnURL=null; System.out.println("FileUtil.retrieveResourceURL()..resourceName:"+rscName); rtnURL=Thread.currentThread().getClass().getResource("/"+rscName); System.out.println("FileUtil.retrieveResourceURL()..Thread.currentThread().getClass().getResource..standalone URL:/"+rscName+"="+rtnURL); if (rtnURL==null) { rtnURL=Thread.currentThread().getClass().getResource(rscName); System.out.println("FileUtil.retrieveResourceURL()..Thread.currentThread().getClass().getResource..standalone URL:"+rscName+"="+rtnURL); } //load resource for webstart deployment if (rtnURL==null) { rtnURL=FileUtil.class.getClassLoader().getResource(rscName); System.out.println("FileUtil.retrieveResourceURL()..FileUtil.class.getClassLoader().getResource..webstart URL:"+rscName+"="+rtnURL); if (rtnURL==null) { rtnURL=FileUtil.class.getClassLoader().getResource("/"+rscName); System.out.println("FileUtil.retrieveResourceURL()..FileUtil.class.getClassLoader().getResource..webstart URL:/"+rscName+"="+rtnURL); } } return rtnURL; } } /** * $Log: not supported by cvs2svn $ * Revision 1.30 2009/04/17 14:24:20 wangeug * clean code:provide meaningful printout messages * 
* Revision 1.29 2009/04/02 06:45:30 altturbo * move getV3XsdFilePath() out to SchemaDirUtil.java * * Revision 1.28 2009/04/02 04:16:57 altturbo * modify getV3XsdFilePath() * * Revision 1.27 2009/04/02 04:10:23 altturbo * modify getV3XsdFilePath() * * Revision 1.26 2009/03/12 01:43:18 umkis * update filenameLocate() * * Revision 1.25 2009/03/10 01:28:32 umkis * minor change * * Revision 1.24 2009/03/09 20:21:49 umkis * minor change * * Revision 1.23 2009/03/09 18:10:31 umkis * add searchPropertyAsFilePath() and searchProperty() * * Revision 1.22 2009/03/09 18:02:29 umkis * add searchPropertyAsFilePath() and searchProperty() * * Revision 1.21 2009/02/25 15:56:25 wangeug * enable webstart * * Revision 1.20 2009/02/18 02:27:50 umkis * update filenameLocate() * * Revision 1.19 2008/12/12 22:01:30 umkis * add getV3XsdFilePath() and getPropertyFromComponentPropertyFile(String key) * * Revision 1.18 2008/10/21 21:07:50 umkis * update ODI to 2008 NE * * Revision 1.17 2008/06/09 19:53:50 phadkes * New license text replaced for all .java files. * * Revision 1.16 2008/05/30 01:00:40 umkis * update getV2ResourceMetaDataLoader() * * Revision 1.15 2008/05/29 00:30:56 umkis * add getV2ResourceMetaDataLoader() * * Revision 1.14 2008/05/22 15:59:47 umkis * add getV2ResourceMetaDataLoader(String) * * Revision 1.13 2008/05/22 15:33:42 umkis * add getV2ResourceMetaDataLoader() * * Revision 1.12 2008/04/01 21:06:46 umkis * minor change * * Revision 1.11 2007/11/16 17:17:34 wangeug * update SDTM module * * Revision 1.10 2007/09/24 20:05:28 umkis * Add v2 Meta data collector * * Revision 1.9 2007/09/20 22:41:01 umkis * no message * * Revision 1.8 2007/08/28 14:24:04 wangeug * clean code * * Revision 1.7 2007/08/28 13:58:51 wangeug * remove schemas folder from caAdapter.jar and set it under root directory: xxxx.xsd use relative path as "include" * * Revision 1.6 2007/08/09 01:56:52 umkis * add a feature that v2Meta directory creating when search the directory * * Revision 1.5 2007/08/08 23:05:48 umkis * update getV2DataDirPath() * * Revision 1.4 2007/07/14 20:16:02 umkis * add 'downloadFromInputStreamToFile()' * * Revision 1.3 2007/07/12 17:30:06 umkis * add 'getComponentsDirPath()' and directory paths of the componts. * * Revision 1.2 2007/07/09 15:39:58 umkis * Update for csv cardinality and test instance generating. * * Revision 1.1 2007/04/03 16:02:37 wangeug * initial loading of common module * * Revision 1.49 2006/12/28 20:50:36 umkis * saveValue() and readValue() in FunctionConstant * * Revision 1.48 2006/11/02 18:32:20 umkis * Some codes of the method 'downloadFromURLtoTempFile' was changed. * * Revision 1.47 2006/11/01 19:02:14 umkis * The method 'downloadFromURLtoTempFile' was added. * * Revision 1.46 2006/10/11 21:00:53 umkis * Change {caAdapter_Home} tag to absolute path name of the home directory. * * Revision 1.45 2006/09/19 18:06:11 umkis * add getV2DataDirPath() * * Revision 1.44 2006/09/18 18:32:46 umkis * change deleteTemporaryFiles() for checking that it was 10 minutes after generating the temp file. * * Revision 1.43 2006/08/10 20:32:02 umkis * isTemporaryFileName(String) was added. * * Revision 1.42 2006/08/10 20:11:29 umkis * Config.TEMPORARY_FILE_PREFIX is used for generate a temporary file name. * * Revision 1.41 2006/08/10 19:46:59 umkis * saveStringIntoTemporaryFile(String) and getRandomNumber(int) was added. * * Revision 1.40 2006/08/10 17:39:44 umkis * For more precisely distinguishing other temporary file, four dgits random number will be attached to filename. 
* * Revision 1.39 2006/08/09 22:53:48 umkis * getTemporayFileName(), deleteTemporaryFiles() and getETCDir() were added. * * Revision 1.38 2006/08/09 22:48:17 umkis * Just before closing mainFrame, all temporary files is deleted. * * Revision 1.37 2006/08/02 18:44:25 jiangsc * License Update * * Revision 1.36 2006/05/04 19:41:58 chene * Add 150003 test instance * * Revision 1.35 2006/01/03 19:16:53 jiangsc * License Update * * Revision 1.34 2006/01/03 18:56:26 jiangsc * License Update * * Revision 1.33 2005/12/30 22:23:30 chene * Update JavaDoc * * Revision 1.32 2005/12/29 23:06:16 jiangsc * Changed to latest project name. * * Revision 1.31 2005/12/29 15:39:06 chene * Optimize imports * * Revision 1.30 2005/12/14 21:29:40 giordanm * no message * * Revision 1.29 2005/11/02 22:36:06 chene * change "\\" to "/" * * Revision 1.28 2005/10/26 21:30:10 chene * Clean up e.printStackTrace() * * Revision 1.27 2005/10/26 17:33:13 giordanm * bug #129 * * Revision 1.26 2005/10/25 20:20:25 chene * Support Schema Location * * Revision 1.25 2005/10/20 18:26:58 jiangsc * Updated to point to the UI default example directory. * * Revision 1.24 2005/10/19 21:49:21 chene * creat new directory workingspace, move example directory to there * * Revision 1.23 2005/08/22 17:32:45 giordanm * change the file attribute within BaseComponent from a String to a File, this checkin also contains some refactor work to the FileUtil. * * Revision 1.22 2005/08/11 22:10:38 jiangsc * Open/Save File Dialog consolidation. * * Revision 1.21 2005/08/09 22:53:04 jiangsc * Save Point * * Revision 1.20 2005/08/08 18:05:50 giordanm * a bunch of checkins that changes hard coded paths to relative paths. * * Revision 1.19 2005/07/19 22:28:03 jiangsc * 1) Renamed FunctionalBox to FunctionBox to be consistent; * 2) Added SwingWorker to OpenObjectToDbMapAction; * 3) Save Point for Function Change. * * Revision 1.18 2005/06/24 20:58:08 jiangsc * Save Point * * Revision 1.17 2005/06/21 23:03:02 jiangsc * Put in new CSVPanel Implementation. * * Revision 1.16 2005/06/08 23:02:02 jiangsc * Implemented New UI. * * Revision 1.15 2005/06/02 22:12:02 chene * no message * * Revision 1.14 2005/05/17 20:07:16 chene * Updated CVS tag test * * Revision 1.13 2005/05/17 20:05:38 chene * Updated CVS tag test * * Revision 1.12 2005/05/17 17:33:07 giordanm * remove the <br> in the javadoc heading * * Revision 1.11 2005/05/17 17:15:45 giordanm * another minor change to the CVS javadoc comments. * * Revision 1.10 2005/05/17 17:01:20 giordanm * Playing around with CVS keywords / javadoc generation. * */
upgrade searchProperties() SVN-Revision: 2136
caadapter/components/common/src/gov/nih/nci/caadapter/common/util/FileUtil.java
upgrade searchProperties()
<ide><path>aadapter/components/common/src/gov/nih/nci/caadapter/common/util/FileUtil.java <ide> * <ide> * @author OWNER: Matthew Giordano <ide> * @author LAST UPDATE $Author: altturbo $ <del> * @version $Revision: 1.31 $ <add> * @version $Revision: 1.32 $ <ide> */ <ide> <ide> public class FileUtil <ide> <ide> if (v2Loader == null) <ide> { <del> String name = "v2Meta"; <add> String name = "v2Meta/version2.4/MessageStructure/ADT_A01.dat"; <ide> <ide> Enumeration<URL> fileURLs = null; <ide> try <ide> /** <ide> * Retrieve a resource URL: work for both standealone and Webstart deployment <ide> * @param rscName <del> * @return <add> * @return URL <ide> */ <ide> public static URL retrieveResourceURL(String rscName) <ide> { <ide> <ide> /** <ide> * $Log: not supported by cvs2svn $ <add> * Revision 1.31 2009/04/21 16:55:48 altturbo <add> * update downloadFromURLtoTempFile() <add> * <ide> * Revision 1.30 2009/04/17 14:24:20 wangeug <ide> * clean code:provide meaningful printout messages <ide> *
Java
apache-2.0
aee6c26fb3d93aea8ed1860ec121ba9a0bf347f8
0
Raycoms/thesis,Raycoms/thesis,Raycoms/thesis,Raycoms/thesis
package main.java.com.bag.server; import bftsmart.reconfiguration.util.RSAKeyLoader; import bftsmart.tom.MessageContext; import bftsmart.tom.ServiceProxy; import bftsmart.tom.core.messages.TOMMessageType; import bftsmart.tom.util.TOMUtil; import com.esotericsoftware.kryo.Kryo; import com.esotericsoftware.kryo.io.ByteBufferInput; import com.esotericsoftware.kryo.io.Input; import com.esotericsoftware.kryo.io.Output; import com.esotericsoftware.kryo.pool.KryoPool; import com.github.benmanes.caffeine.cache.Cache; import com.github.benmanes.caffeine.cache.Caffeine; import main.java.com.bag.operations.CreateOperation; import main.java.com.bag.operations.DeleteOperation; import main.java.com.bag.operations.IOperation; import main.java.com.bag.operations.UpdateOperation; import main.java.com.bag.util.Constants; import main.java.com.bag.util.Log; import main.java.com.bag.util.storage.NodeStorage; import main.java.com.bag.util.storage.RelationshipStorage; import main.java.com.bag.util.storage.SignatureStorage; import org.jetbrains.annotations.NotNull; import java.security.PublicKey; import java.util.*; /** * Class handling server communication in the global cluster. */ public class GlobalClusterSlave extends AbstractRecoverable { /** * Name of the location of the global config. */ private static final String GLOBAL_CONFIG_LOCATION = "global/config"; /** * The wrapper class instance. Used to access the global cluster if possible. */ private final ServerWrapper wrapper; /** * The id of the local cluster. */ private final int id; /** * The id of the internal client used in this server */ private final int idClient; /** * Cache which holds the signatureStorages for the consistency. */ private final Cache<Long, SignatureStorage> signatureStorageCache = Caffeine.newBuilder().build(); /** * The serviceProxy to establish communication with the other replicas. */ private final ServiceProxy proxy; public GlobalClusterSlave(final int id, @NotNull final ServerWrapper wrapper, final ServerInstrumentation instrumentation) { super(id, GLOBAL_CONFIG_LOCATION, wrapper, instrumentation); this.id = id; this.idClient = id + 1000; this.wrapper = wrapper; Log.getLogger().info("Turning on client proxy with id:" + idClient); this.proxy = new ServiceProxy(this.idClient, GLOBAL_CONFIG_LOCATION); Log.getLogger().info("Turned on global cluster with id:" + id); } private byte[] makeEmptyAbortResult() { final Output output = new Output(0, 128); final KryoPool pool = new KryoPool.Builder(super.getFactory()).softReferences().build(); final Kryo kryo = pool.borrow(); kryo.writeObject(output, Constants.ABORT); byte[] temp = output.getBuffer(); output.close(); pool.release(kryo); return temp; } //Every byte array is one request. 
@Override public byte[][] appExecuteBatch(final byte[][] bytes, final MessageContext[] messageContexts) { byte[][] allResults = new byte[bytes.length][]; for (int i = 0; i < bytes.length; ++i) { if (messageContexts != null && messageContexts[i] != null) { KryoPool pool = new KryoPool.Builder(super.getFactory()).softReferences().build(); Kryo kryo = pool.borrow(); Input input = new Input(bytes[i]); String type = kryo.readObject(input, String.class); if (Constants.COMMIT_MESSAGE.equals(type)) { final Long timeStamp = kryo.readObject(input, Long.class); byte[] result = executeCommit(kryo, input, timeStamp); pool.release(kryo); allResults[i] = result; } else { Log.getLogger().error("Return empty bytes for message type: " + type); allResults[i] = makeEmptyAbortResult(); updateCounts(0, 0, 0, 1); } } else { Log.getLogger().error("Received message with empty context!"); allResults[i] = makeEmptyAbortResult(); updateCounts(0, 0, 0, 1); } } return allResults; } @Override void readSpecificData(final Input input, final Kryo kryo) { final int length = kryo.readObject(input, Integer.class); for (int i = 0; i < length; i++) { try { signatureStorageCache.put(kryo.readObject(input, Long.class), kryo.readObject(input, SignatureStorage.class)); } catch (ClassCastException ex) { Log.getLogger().warn("Unable to restore signatureStoreMap entry: " + i + " at server: " + id, ex); } } } @Override public Output writeSpecificData(final Output output, final Kryo kryo) { if (signatureStorageCache == null) { return output; } Log.getLogger().warn("Size at global: " + signatureStorageCache.estimatedSize()); final Map<Long, SignatureStorage> copy = signatureStorageCache.asMap(); kryo.writeObject(output, copy.size()); for (final Map.Entry<Long, SignatureStorage> entrySet : copy.entrySet()) { kryo.writeObject(output, entrySet.getKey()); kryo.writeObject(output, entrySet.getValue()); } return output; } /** * Check for conflicts and unpack things for conflict handle check. * * @param kryo the kryo instance. * @param input the input. * @return the response. */ private synchronized byte[] executeCommit(final Kryo kryo, final Input input, final long timeStamp) { //Read the inputStream. final List readsSetNodeX = kryo.readObject(input, ArrayList.class); final List readsSetRelationshipX = kryo.readObject(input, ArrayList.class); final List writeSetX = kryo.readObject(input, ArrayList.class); //Create placeHolders. ArrayList<NodeStorage> readSetNode; ArrayList<RelationshipStorage> readsSetRelationship; ArrayList<IOperation> localWriteSet; input.close(); Output output = new Output(128); kryo.writeObject(output, Constants.COMMIT_RESPONSE); try { readSetNode = (ArrayList<NodeStorage>) readsSetNodeX; readsSetRelationship = (ArrayList<RelationshipStorage>) readsSetRelationshipX; localWriteSet = (ArrayList<IOperation>) writeSetX; } catch (Exception e) { Log.getLogger().warn("Couldn't convert received data to sets. 
Returning abort", e); kryo.writeObject(output, Constants.ABORT); kryo.writeObject(output, getGlobalSnapshotId()); //Send abort to client and abort byte[] returnBytes = output.getBuffer(); output.close(); return returnBytes; } if (!ConflictHandler.checkForConflict(super.getGlobalWriteSet(), super.getLatestWritesSet(), new ArrayList<>(localWriteSet), readSetNode, readsSetRelationship, timeStamp, wrapper.getDataBaseAccess())) { updateCounts(0, 0, 0, 1); Log.getLogger() .info("Found conflict, returning abort with timestamp: " + timeStamp + " globalSnapshot at: " + getGlobalSnapshotId() + " and writes: " + localWriteSet.size() + " and reads: " + readSetNode.size() + " + " + readsSetRelationship.size()); kryo.writeObject(output, Constants.ABORT); kryo.writeObject(output, getGlobalSnapshotId()); if(!localWriteSet.isEmpty()) { Log.getLogger().warn("Aborting of: " + getGlobalSnapshotId()); for(IOperation operation: localWriteSet) { Log.getLogger().warn(operation.toString()); } for(NodeStorage nodeStorage : readSetNode) { Log.getLogger().warn(nodeStorage.toString()); } for(RelationshipStorage nodeStorage : readsSetRelationship) { Log.getLogger().warn(nodeStorage.toString()); } } //Send abort to client and abort byte[] returnBytes = output.getBuffer(); output.close(); return returnBytes; } if (!localWriteSet.isEmpty()) { Log.getLogger().warn("Comitting: " + getGlobalSnapshotId()); for(IOperation operation: localWriteSet) { Log.getLogger().warn(operation.toString()); } for(NodeStorage nodeStorage : readSetNode) { Log.getLogger().warn(nodeStorage.toString()); } for(RelationshipStorage nodeStorage : readsSetRelationship) { Log.getLogger().warn(nodeStorage.toString()); } super.executeCommit(localWriteSet, "master"); if (wrapper.getLocalCLuster() != null) { signCommitWithDecisionAndDistribute(localWriteSet, Constants.COMMIT, getGlobalSnapshotId(), kryo); } } else { updateCounts(0, 0, 1, 0); } kryo.writeObject(output, Constants.COMMIT); kryo.writeObject(output, getGlobalSnapshotId()); byte[] returnBytes = output.getBuffer(); output.close(); Log.getLogger().info("No conflict found, returning commit with snapShot id: " + getGlobalSnapshotId() + " size: " + returnBytes.length); return returnBytes; } /** * Check for conflicts and unpack things for conflict handle check. * * @param kryo the kryo instance. * @param input the input. * @return the response. */ private byte[] executeReadOnlyCommit(final Kryo kryo, final Input input, final long timeStamp) { //Read the inputStream. final List readsSetNodeX = kryo.readObject(input, ArrayList.class); final List readsSetRelationshipX = kryo.readObject(input, ArrayList.class); final List writeSetX = kryo.readObject(input, ArrayList.class); //Create placeHolders. ArrayList<NodeStorage> readSetNode; ArrayList<RelationshipStorage> readsSetRelationship; ArrayList<IOperation> localWriteSet; input.close(); Output output = new Output(128); kryo.writeObject(output, Constants.COMMIT_RESPONSE); try { readSetNode = (ArrayList<NodeStorage>) readsSetNodeX; readsSetRelationship = (ArrayList<RelationshipStorage>) readsSetRelationshipX; localWriteSet = (ArrayList<IOperation>) writeSetX; } catch (Exception e) { Log.getLogger().warn("Couldn't convert received data to sets. 
Returning abort", e); kryo.writeObject(output, Constants.ABORT); kryo.writeObject(output, getGlobalSnapshotId()); //Send abort to client and abort byte[] returnBytes = output.getBuffer(); output.close(); return returnBytes; } if (!ConflictHandler.checkForConflict(super.getGlobalWriteSet(), super.getLatestWritesSet(), localWriteSet, readSetNode, readsSetRelationship, timeStamp, wrapper.getDataBaseAccess())) { updateCounts(0, 0, 0, 1); Log.getLogger() .info("Found conflict, returning abort with timestamp: " + timeStamp + " globalSnapshot at: " + getGlobalSnapshotId() + " and writes: " + localWriteSet.size() + " and reads: " + readSetNode.size() + " + " + readsSetRelationship.size()); kryo.writeObject(output, Constants.ABORT); kryo.writeObject(output, getGlobalSnapshotId()); //Send abort to client and abort byte[] returnBytes = output.getBuffer(); output.close(); return returnBytes; } updateCounts(0, 0, 1, 0); kryo.writeObject(output, Constants.COMMIT); kryo.writeObject(output, getGlobalSnapshotId()); byte[] returnBytes = output.getBuffer(); output.close(); Log.getLogger().info("No conflict found, returning commit with snapShot id: " + getGlobalSnapshotId() + " size: " + returnBytes.length); return returnBytes; } private class SignatureThread extends Thread { final List<IOperation> localWriteSet; final String commit; final long globalSnapshotId; final Kryo kryo; private SignatureThread(final List<IOperation> localWriteSet, final String commit, final long globalSnapshotId, final Kryo kryo) { this.localWriteSet = localWriteSet; this.commit = commit; this.globalSnapshotId = globalSnapshotId; this.kryo = kryo; } @Override public void run() { signCommitWithDecisionAndDistribute(localWriteSet, Constants.COMMIT, getGlobalSnapshotId(), kryo); } } private void signCommitWithDecisionAndDistribute(final List<IOperation> localWriteSet, final String decision, final long snapShotId, final Kryo kryo) { Log.getLogger().info("Sending signed commit to the other global replicas"); final RSAKeyLoader rsaLoader = new RSAKeyLoader(this.idClient, GLOBAL_CONFIG_LOCATION, false); //Todo probably will need a bigger buffer in the future. size depending on the set size? 
final Output output = new Output(0, 100240); kryo.writeObject(output, Constants.SIGNATURE_MESSAGE); kryo.writeObject(output, decision); kryo.writeObject(output, snapShotId); kryo.writeObject(output, localWriteSet); final byte[] message = output.toBytes(); final byte[] signature; try { signature = TOMUtil.signMessage(rsaLoader.loadPrivateKey(), message); } catch (Exception e) { Log.getLogger().warn("Unable to sign message at server " + id, e); return; } final SignatureStorage signatureStorage; if (signatureStorageCache.getIfPresent(getGlobalSnapshotId()) != null) { signatureStorage = signatureStorageCache.getIfPresent(getGlobalSnapshotId()); if (signatureStorage.getMessage().length != output.toBytes().length) { Log.getLogger().error("Message in signatureStorage: " + signatureStorage.getMessage().length + " message of committing server: " + message.length); } } else { Log.getLogger().info("Size of message stored is: " + message.length); signatureStorage = new SignatureStorage(super.getReplica().getReplicaContext().getStaticConfiguration().getN() - 1, message, decision); signatureStorageCache.put(snapShotId, signatureStorage); } signatureStorage.setProcessed(); Log.getLogger().info("Set processed by global cluster: " + snapShotId + " by: " + idClient); signatureStorage.addSignatures(idClient, signature); if (signatureStorage.hasEnough()) { Log.getLogger().info("Sending update to slave signed by all members: " + snapShotId); updateSlave(signatureStorage); signatureStorage.setDistributed(); if (signatureStorage.hasAll()) { signatureStorageCache.invalidate(snapShotId); } } else { signatureStorageCache.put(snapShotId, signatureStorage); } kryo.writeObject(output, message.length); kryo.writeObject(output, signature.length); output.writeBytes(signature); proxy.sendMessageToTargets(output.getBuffer(), 0, proxy.getViewManager().getCurrentViewProcesses(), TOMMessageType.UNORDERED_REQUEST); output.close(); } private Output makeEmptyReadResponse(String message, Kryo kryo) { final Output output = new Output(0, 10240); kryo.writeObject(output, message); kryo.writeObject(output, new ArrayList<NodeStorage>()); kryo.writeObject(output, new ArrayList<RelationshipStorage>()); return output; } /** * Handle a signature message. * * @param input the message. * @param messageContext the context. * @param kryo the kryo object. */ private void handleSignatureMessage(final Input input, final MessageContext messageContext, final Kryo kryo) { //Our own message. if (idClient == messageContext.getSender()) { return; } final byte[] buffer = input.getBuffer(); final String decision = kryo.readObject(input, String.class); final Long snapShotId = kryo.readObject(input, Long.class); final List writeSet = kryo.readObject(input, ArrayList.class); final ArrayList<IOperation> localWriteSet; try { localWriteSet = (ArrayList<IOperation>) writeSet; } catch (ClassCastException e) { Log.getLogger().warn("Couldn't convert received signature message.", e); return; } Log.getLogger().info("Server: " + id + " Received message to sign with snapShotId: " + snapShotId + " of Server " + messageContext.getSender() + " and decision: " + decision + " and a writeSet of the length of: " + localWriteSet.size()); final int messageLength = kryo.readObject(input, Integer.class); final int signatureLength = kryo.readObject(input, Integer.class); final byte[] signature = input.readBytes(signatureLength); //Not required anymore. 
input.close(); final RSAKeyLoader rsaLoader = new RSAKeyLoader(messageContext.getSender(), GLOBAL_CONFIG_LOCATION, false); final PublicKey key; try { key = rsaLoader.loadPublicKey(); } catch (Exception e) { Log.getLogger().warn("Unable to load public key on server " + id + " sent by server " + messageContext.getSender(), e); return; } final byte[] message = new byte[messageLength]; System.arraycopy(buffer, 0, message, 0, messageLength); boolean signatureMatches = TOMUtil.verifySignature(key, message, signature); if (signatureMatches) { storeSignedMessage(snapShotId, signature, messageContext, decision, message, writeSet); return; } Log.getLogger().warn("Signature doesn't match of message, throwing message away."); } @Override public byte[] appExecuteUnordered(final byte[] bytes, final MessageContext messageContext) { Log.getLogger().info("Received unordered message at global replica"); final KryoPool pool = new KryoPool.Builder(getFactory()).softReferences().build(); final Kryo kryo = pool.borrow(); final Input input = new Input(bytes); final String messageType = kryo.readObject(input, String.class); Output output = new Output(1, 804800); switch (messageType) { case Constants.READ_MESSAGE: Log.getLogger().info("Received Node read message"); try { kryo.writeObject(output, Constants.READ_MESSAGE); output = handleNodeRead(input, messageContext, kryo, output); } catch (Throwable t) { Log.getLogger().error("Error on " + Constants.READ_MESSAGE + ", returning empty read", t); output = makeEmptyReadResponse(Constants.READ_MESSAGE, kryo); } break; case Constants.RELATIONSHIP_READ_MESSAGE: Log.getLogger().info("Received Relationship read message"); try { kryo.writeObject(output, Constants.READ_MESSAGE); output = handleRelationshipRead(input, kryo, output); } catch (Throwable t) { Log.getLogger().error("Error on " + Constants.RELATIONSHIP_READ_MESSAGE + ", returning empty read", t); output = makeEmptyReadResponse(Constants.RELATIONSHIP_READ_MESSAGE, kryo); } break; case Constants.SIGNATURE_MESSAGE: if (wrapper.getLocalCLuster() != null) { handleSignatureMessage(input, messageContext, kryo); } break; case Constants.REGISTER_GLOBALLY_MESSAGE: Log.getLogger().info("Received register globally message"); output.close(); input.close(); pool.release(kryo); return handleRegisteringSlave(input, kryo); case Constants.REGISTER_GLOBALLY_CHECK: Log.getLogger().info("Received globally check message"); output.close(); input.close(); pool.release(kryo); return handleGlobalRegistryCheck(input, kryo); case Constants.COMMIT: Log.getLogger().info("Received commit message"); output.close(); byte[] result = handleReadOnlyCommit(input, kryo); input.close(); pool.release(kryo); Log.getLogger().info("Return it to client, size: " + result.length); return result; default: Log.getLogger().warn("Incorrect operation sent unordered to the server"); break; } byte[] returnValue = output.getBuffer(); Log.getLogger().info("Return it to client, size: " + returnValue.length); input.close(); output.close(); pool.release(kryo); return returnValue; } private byte[] handleReadOnlyCommit(final Input input, final Kryo kryo) { final Long timeStamp = kryo.readObject(input, Long.class); return executeReadOnlyCommit(kryo, input, timeStamp); } /** * Handle the check for the global registering of a slave. * * @param input the incoming message. * @param kryo the kryo instance. 
* @return the reply */ private byte[] handleGlobalRegistryCheck(final Input input, final Kryo kryo) { final Output output = new Output(512); kryo.writeObject(output, Constants.REGISTER_GLOBALLY_CHECK); boolean decision = false; if (!wrapper.getLocalCLuster().isPrimary() || wrapper.getLocalCLuster().askIfIsPrimary(kryo.readObject(input, Integer.class), kryo.readObject(input, Integer.class), kryo)) { decision = true; } kryo.writeObject(output, decision); final byte[] result = output.getBuffer(); output.close(); input.close(); return result; } /** * This message comes from the local cluster. * Will respond true if it can register. * Message which handles slaves registering at the global cluster. * * @param kryo the kryo instance. * @param input the message. * @return the message in bytes. */ @SuppressWarnings("squid:S2095") private byte[] handleRegisteringSlave(final Input input, final Kryo kryo) { final int localClusterID = kryo.readObject(input, Integer.class); final int newPrimary = kryo.readObject(input, Integer.class); final int oldPrimary = kryo.readObject(input, Integer.class); final ServiceProxy localProxy = new ServiceProxy(1000 + oldPrimary, "local" + localClusterID); final Output output = new Output(512); kryo.writeObject(output, Constants.REGISTER_GLOBALLY_CHECK); kryo.writeObject(output, newPrimary); byte[] result = localProxy.invokeUnordered(output.getBuffer()); final Output nextOutput = new Output(512); kryo.writeObject(output, Constants.REGISTER_GLOBALLY_REPLY); final Input answer = new Input(result); if (Constants.REGISTER_GLOBALLY_REPLY.equals(answer.readString())) { kryo.writeObject(nextOutput, answer.readBoolean()); } final byte[] returnBuffer = nextOutput.getBuffer(); nextOutput.close(); answer.close(); localProxy.close(); output.close(); return returnBuffer; //remove currentView and edit system.config //If alright send the result to all remaining global clusters so that they update themselves. } /** * Store the signed message on the server. * If n-f messages arrived send it to client. * * @param snapShotId the snapShotId as key. * @param signature the signature * @param context the message context. * @param decision the decision. * @param message the message. */ private void storeSignedMessage( final Long snapShotId, final byte[] signature, @NotNull final MessageContext context, final String decision, final byte[] message, final List<IOperation> writeSet) { final SignatureStorage signatureStorage; if (signatureStorageCache.getIfPresent(snapShotId) == null) { signatureStorage = new SignatureStorage(super.getReplica().getReplicaContext().getStaticConfiguration().getN() - 1, message, decision); signatureStorageCache.put(snapShotId, signatureStorage); Log.getLogger().info("Replica: " + id + " did not have the transaction prepared. 
Might be slow or corrupted, message size stored: " + message.length); } else { signatureStorage = signatureStorageCache.getIfPresent(snapShotId); if (!signatureStorage.getDecision().equals(decision)) { Log.getLogger().error("Different decision"); } } if (signatureStorage.getMessage().length != message.length) { Log.getLogger().error("Message in signatureStorage: " + signatureStorage.getMessage().length + " message of writing server " + message.length); final KryoPool pool = new KryoPool.Builder(super.getFactory()).softReferences().build(); final Kryo kryo = pool.borrow(); Log.getLogger().warn("Start logging"); final Input input = new Input(new ByteBufferInput(signatureStorage.getMessage())); kryo.readObject(input, String.class); kryo.readObject(input, String.class); final Long snapShotId2 = kryo.readObject(input, Long.class); Log.getLogger().warn("SnapshotId local: " + snapShotId2 + " snapshotId received: " + snapShotId); final List lWriteSet = kryo.readObject(input, ArrayList.class); Log.getLogger().warn("WriteSet received: " + writeSet.size()); for (IOperation op : writeSet) { if (op instanceof UpdateOperation) { Log.getLogger().warn("Update"); Log.getLogger().warn(((UpdateOperation) op).getKey().toString()); Log.getLogger().warn(((UpdateOperation) op).getValue().toString()); } else if (op instanceof DeleteOperation) { Log.getLogger().warn("Delete"); Log.getLogger().warn(((DeleteOperation) op).getObject().toString()); } else if (op instanceof CreateOperation) { Log.getLogger().warn("Create"); Log.getLogger().warn(((CreateOperation) op).getObject().toString()); } } final ArrayList<IOperation> localWriteSet2; input.close(); try { localWriteSet2 = (ArrayList<IOperation>) lWriteSet; } catch (ClassCastException e) { Log.getLogger().warn("Couldn't convert received signature message.", e); return; } Log.getLogger().warn("WriteSet local: " + localWriteSet2.size()); for (IOperation op : localWriteSet2) { if (op instanceof UpdateOperation) { Log.getLogger().warn("Update"); Log.getLogger().warn(((UpdateOperation) op).getKey().toString()); Log.getLogger().warn(((UpdateOperation) op).getValue().toString()); } else if (op instanceof DeleteOperation) { Log.getLogger().warn("Delete"); Log.getLogger().warn(((DeleteOperation) op).getObject().toString()); } else if (op instanceof CreateOperation) { Log.getLogger().warn("Create"); Log.getLogger().warn(((CreateOperation) op).getObject().toString()); } } Log.getLogger().warn("End logging"); } if (!decision.equals(signatureStorage.getDecision())) { Log.getLogger().warn("Replica: " + id + " did receive a different decision of replica: " + context.getSender() + ". Might be corrupted."); return; } signatureStorage.addSignatures(context.getSender(), signature); Log.getLogger().info("Adding signature to signatureStorage, has: " + signatureStorage.getSignatures().size() + " is: " + signatureStorage.isProcessed() + " by: " + context.getSender()); if (signatureStorage.hasEnough()) { Log.getLogger().info("Sending update to slave signed by all members: " + snapShotId); if (signatureStorage.isProcessed()) { updateSlave(signatureStorage); signatureStorage.setDistributed(); } if (signatureStorage.hasAll() && signatureStorage.isDistributed()) { signatureStorageCache.invalidate(snapShotId); return; } } signatureStorageCache.put(snapShotId, signatureStorage); } /** * Update the slave with a transaction. * * @param signatureStorage the signatureStorage with message and signatures.. 
*/ private void updateSlave(final SignatureStorage signatureStorage) { if (this.wrapper.getLocalCLuster() != null) { this.wrapper.getLocalCLuster().propagateUpdate(new SignatureStorage(signatureStorage)); } } /** * Invoke a message to the global cluster. * * @param input the input object. * @return the response. */ public Output invokeGlobally(final Input input) { return new Output(proxy.invokeOrdered(input.getBuffer())); } /** * Closes the global cluster and his code. */ public void close() { super.terminate(); proxy.close(); } }
src/main/java/main/java/com/bag/server/GlobalClusterSlave.java
package main.java.com.bag.server; import bftsmart.reconfiguration.util.RSAKeyLoader; import bftsmart.tom.MessageContext; import bftsmart.tom.ServiceProxy; import bftsmart.tom.core.messages.TOMMessageType; import bftsmart.tom.util.TOMUtil; import com.esotericsoftware.kryo.Kryo; import com.esotericsoftware.kryo.io.ByteBufferInput; import com.esotericsoftware.kryo.io.Input; import com.esotericsoftware.kryo.io.Output; import com.esotericsoftware.kryo.pool.KryoPool; import com.github.benmanes.caffeine.cache.Cache; import com.github.benmanes.caffeine.cache.Caffeine; import main.java.com.bag.operations.CreateOperation; import main.java.com.bag.operations.DeleteOperation; import main.java.com.bag.operations.IOperation; import main.java.com.bag.operations.UpdateOperation; import main.java.com.bag.util.Constants; import main.java.com.bag.util.Log; import main.java.com.bag.util.storage.NodeStorage; import main.java.com.bag.util.storage.RelationshipStorage; import main.java.com.bag.util.storage.SignatureStorage; import org.jetbrains.annotations.NotNull; import java.security.PublicKey; import java.util.*; /** * Class handling server communication in the global cluster. */ public class GlobalClusterSlave extends AbstractRecoverable { /** * Name of the location of the global config. */ private static final String GLOBAL_CONFIG_LOCATION = "global/config"; /** * The wrapper class instance. Used to access the global cluster if possible. */ private final ServerWrapper wrapper; /** * The id of the local cluster. */ private final int id; /** * The id of the internal client used in this server */ private final int idClient; /** * Cache which holds the signatureStorages for the consistency. */ private final Cache<Long, SignatureStorage> signatureStorageCache = Caffeine.newBuilder().build(); /** * The serviceProxy to establish communication with the other replicas. */ private final ServiceProxy proxy; public GlobalClusterSlave(final int id, @NotNull final ServerWrapper wrapper, final ServerInstrumentation instrumentation) { super(id, GLOBAL_CONFIG_LOCATION, wrapper, instrumentation); this.id = id; this.idClient = id + 1000; this.wrapper = wrapper; Log.getLogger().info("Turning on client proxy with id:" + idClient); this.proxy = new ServiceProxy(this.idClient, GLOBAL_CONFIG_LOCATION); Log.getLogger().info("Turned on global cluster with id:" + id); } private byte[] makeEmptyAbortResult() { final Output output = new Output(0, 128); final KryoPool pool = new KryoPool.Builder(super.getFactory()).softReferences().build(); final Kryo kryo = pool.borrow(); kryo.writeObject(output, Constants.ABORT); byte[] temp = output.getBuffer(); output.close(); pool.release(kryo); return temp; } //Every byte array is one request. 
@Override public byte[][] appExecuteBatch(final byte[][] bytes, final MessageContext[] messageContexts) { byte[][] allResults = new byte[bytes.length][]; for (int i = 0; i < bytes.length; ++i) { if (messageContexts != null && messageContexts[i] != null) { KryoPool pool = new KryoPool.Builder(super.getFactory()).softReferences().build(); Kryo kryo = pool.borrow(); Input input = new Input(bytes[i]); String type = kryo.readObject(input, String.class); if (Constants.COMMIT_MESSAGE.equals(type)) { final Long timeStamp = kryo.readObject(input, Long.class); byte[] result = executeCommit(kryo, input, timeStamp); pool.release(kryo); allResults[i] = result; } else { Log.getLogger().error("Return empty bytes for message type: " + type); allResults[i] = makeEmptyAbortResult(); updateCounts(0, 0, 0, 1); } } else { Log.getLogger().error("Received message with empty context!"); allResults[i] = makeEmptyAbortResult(); updateCounts(0, 0, 0, 1); } } return allResults; } @Override void readSpecificData(final Input input, final Kryo kryo) { final int length = kryo.readObject(input, Integer.class); for (int i = 0; i < length; i++) { try { signatureStorageCache.put(kryo.readObject(input, Long.class), kryo.readObject(input, SignatureStorage.class)); } catch (ClassCastException ex) { Log.getLogger().warn("Unable to restore signatureStoreMap entry: " + i + " at server: " + id, ex); } } } @Override public Output writeSpecificData(final Output output, final Kryo kryo) { if (signatureStorageCache == null) { return output; } Log.getLogger().warn("Size at global: " + signatureStorageCache.estimatedSize()); final Map<Long, SignatureStorage> copy = signatureStorageCache.asMap(); kryo.writeObject(output, copy.size()); for (final Map.Entry<Long, SignatureStorage> entrySet : copy.entrySet()) { kryo.writeObject(output, entrySet.getKey()); kryo.writeObject(output, entrySet.getValue()); } return output; } /** * Check for conflicts and unpack things for conflict handle check. * * @param kryo the kryo instance. * @param input the input. * @return the response. */ private synchronized byte[] executeCommit(final Kryo kryo, final Input input, final long timeStamp) { //Read the inputStream. final List readsSetNodeX = kryo.readObject(input, ArrayList.class); final List readsSetRelationshipX = kryo.readObject(input, ArrayList.class); final List writeSetX = kryo.readObject(input, ArrayList.class); //Create placeHolders. ArrayList<NodeStorage> readSetNode; ArrayList<RelationshipStorage> readsSetRelationship; ArrayList<IOperation> localWriteSet; input.close(); Output output = new Output(128); kryo.writeObject(output, Constants.COMMIT_RESPONSE); try { readSetNode = (ArrayList<NodeStorage>) readsSetNodeX; readsSetRelationship = (ArrayList<RelationshipStorage>) readsSetRelationshipX; localWriteSet = (ArrayList<IOperation>) writeSetX; } catch (Exception e) { Log.getLogger().warn("Couldn't convert received data to sets. 
Returning abort", e); kryo.writeObject(output, Constants.ABORT); kryo.writeObject(output, getGlobalSnapshotId()); //Send abort to client and abort byte[] returnBytes = output.getBuffer(); output.close(); return returnBytes; } if (!ConflictHandler.checkForConflict(super.getGlobalWriteSet(), super.getLatestWritesSet(), new ArrayList<>(localWriteSet), readSetNode, readsSetRelationship, timeStamp, wrapper.getDataBaseAccess())) { updateCounts(0, 0, 0, 1); Log.getLogger() .info("Found conflict, returning abort with timestamp: " + timeStamp + " globalSnapshot at: " + getGlobalSnapshotId() + " and writes: " + localWriteSet.size() + " and reads: " + readSetNode.size() + " + " + readsSetRelationship.size()); kryo.writeObject(output, Constants.ABORT); kryo.writeObject(output, getGlobalSnapshotId()); if(!localWriteSet.isEmpty()) { Log.getLogger().warn("Aborting!!!"); for(IOperation operation: localWriteSet) { Log.getLogger().warn(operation.toString()); } for(NodeStorage nodeStorage : readSetNode) { Log.getLogger().warn(nodeStorage.toString()); } for(RelationshipStorage nodeStorage : readsSetRelationship) { Log.getLogger().warn(nodeStorage.toString()); } } //Send abort to client and abort byte[] returnBytes = output.getBuffer(); output.close(); return returnBytes; } if (!localWriteSet.isEmpty()) { Log.getLogger().warn("Comitting!!!"); for(IOperation operation: localWriteSet) { Log.getLogger().warn(operation.toString()); } for(NodeStorage nodeStorage : readSetNode) { Log.getLogger().warn(nodeStorage.toString()); } for(RelationshipStorage nodeStorage : readsSetRelationship) { Log.getLogger().warn(nodeStorage.toString()); } super.executeCommit(localWriteSet, "master"); if (wrapper.getLocalCLuster() != null) { signCommitWithDecisionAndDistribute(localWriteSet, Constants.COMMIT, getGlobalSnapshotId(), kryo); } } else { updateCounts(0, 0, 1, 0); } kryo.writeObject(output, Constants.COMMIT); kryo.writeObject(output, getGlobalSnapshotId()); byte[] returnBytes = output.getBuffer(); output.close(); Log.getLogger().info("No conflict found, returning commit with snapShot id: " + getGlobalSnapshotId() + " size: " + returnBytes.length); return returnBytes; } /** * Check for conflicts and unpack things for conflict handle check. * * @param kryo the kryo instance. * @param input the input. * @return the response. */ private byte[] executeReadOnlyCommit(final Kryo kryo, final Input input, final long timeStamp) { //Read the inputStream. final List readsSetNodeX = kryo.readObject(input, ArrayList.class); final List readsSetRelationshipX = kryo.readObject(input, ArrayList.class); final List writeSetX = kryo.readObject(input, ArrayList.class); //Create placeHolders. ArrayList<NodeStorage> readSetNode; ArrayList<RelationshipStorage> readsSetRelationship; ArrayList<IOperation> localWriteSet; input.close(); Output output = new Output(128); kryo.writeObject(output, Constants.COMMIT_RESPONSE); try { readSetNode = (ArrayList<NodeStorage>) readsSetNodeX; readsSetRelationship = (ArrayList<RelationshipStorage>) readsSetRelationshipX; localWriteSet = (ArrayList<IOperation>) writeSetX; } catch (Exception e) { Log.getLogger().warn("Couldn't convert received data to sets. 
Returning abort", e); kryo.writeObject(output, Constants.ABORT); kryo.writeObject(output, getGlobalSnapshotId()); //Send abort to client and abort byte[] returnBytes = output.getBuffer(); output.close(); return returnBytes; } if (!ConflictHandler.checkForConflict(super.getGlobalWriteSet(), super.getLatestWritesSet(), localWriteSet, readSetNode, readsSetRelationship, timeStamp, wrapper.getDataBaseAccess())) { updateCounts(0, 0, 0, 1); Log.getLogger() .info("Found conflict, returning abort with timestamp: " + timeStamp + " globalSnapshot at: " + getGlobalSnapshotId() + " and writes: " + localWriteSet.size() + " and reads: " + readSetNode.size() + " + " + readsSetRelationship.size()); kryo.writeObject(output, Constants.ABORT); kryo.writeObject(output, getGlobalSnapshotId()); //Send abort to client and abort byte[] returnBytes = output.getBuffer(); output.close(); return returnBytes; } updateCounts(0, 0, 1, 0); kryo.writeObject(output, Constants.COMMIT); kryo.writeObject(output, getGlobalSnapshotId()); byte[] returnBytes = output.getBuffer(); output.close(); Log.getLogger().info("No conflict found, returning commit with snapShot id: " + getGlobalSnapshotId() + " size: " + returnBytes.length); return returnBytes; } private class SignatureThread extends Thread { final List<IOperation> localWriteSet; final String commit; final long globalSnapshotId; final Kryo kryo; private SignatureThread(final List<IOperation> localWriteSet, final String commit, final long globalSnapshotId, final Kryo kryo) { this.localWriteSet = localWriteSet; this.commit = commit; this.globalSnapshotId = globalSnapshotId; this.kryo = kryo; } @Override public void run() { signCommitWithDecisionAndDistribute(localWriteSet, Constants.COMMIT, getGlobalSnapshotId(), kryo); } } private void signCommitWithDecisionAndDistribute(final List<IOperation> localWriteSet, final String decision, final long snapShotId, final Kryo kryo) { Log.getLogger().info("Sending signed commit to the other global replicas"); final RSAKeyLoader rsaLoader = new RSAKeyLoader(this.idClient, GLOBAL_CONFIG_LOCATION, false); //Todo probably will need a bigger buffer in the future. size depending on the set size? 
final Output output = new Output(0, 100240); kryo.writeObject(output, Constants.SIGNATURE_MESSAGE); kryo.writeObject(output, decision); kryo.writeObject(output, snapShotId); kryo.writeObject(output, localWriteSet); final byte[] message = output.toBytes(); final byte[] signature; try { signature = TOMUtil.signMessage(rsaLoader.loadPrivateKey(), message); } catch (Exception e) { Log.getLogger().warn("Unable to sign message at server " + id, e); return; } final SignatureStorage signatureStorage; if (signatureStorageCache.getIfPresent(getGlobalSnapshotId()) != null) { signatureStorage = signatureStorageCache.getIfPresent(getGlobalSnapshotId()); if (signatureStorage.getMessage().length != output.toBytes().length) { Log.getLogger().error("Message in signatureStorage: " + signatureStorage.getMessage().length + " message of committing server: " + message.length); } } else { Log.getLogger().info("Size of message stored is: " + message.length); signatureStorage = new SignatureStorage(super.getReplica().getReplicaContext().getStaticConfiguration().getN() - 1, message, decision); signatureStorageCache.put(snapShotId, signatureStorage); } signatureStorage.setProcessed(); Log.getLogger().info("Set processed by global cluster: " + snapShotId + " by: " + idClient); signatureStorage.addSignatures(idClient, signature); if (signatureStorage.hasEnough()) { Log.getLogger().info("Sending update to slave signed by all members: " + snapShotId); updateSlave(signatureStorage); signatureStorage.setDistributed(); if (signatureStorage.hasAll()) { signatureStorageCache.invalidate(snapShotId); } } else { signatureStorageCache.put(snapShotId, signatureStorage); } kryo.writeObject(output, message.length); kryo.writeObject(output, signature.length); output.writeBytes(signature); proxy.sendMessageToTargets(output.getBuffer(), 0, proxy.getViewManager().getCurrentViewProcesses(), TOMMessageType.UNORDERED_REQUEST); output.close(); } private Output makeEmptyReadResponse(String message, Kryo kryo) { final Output output = new Output(0, 10240); kryo.writeObject(output, message); kryo.writeObject(output, new ArrayList<NodeStorage>()); kryo.writeObject(output, new ArrayList<RelationshipStorage>()); return output; } /** * Handle a signature message. * * @param input the message. * @param messageContext the context. * @param kryo the kryo object. */ private void handleSignatureMessage(final Input input, final MessageContext messageContext, final Kryo kryo) { //Our own message. if (idClient == messageContext.getSender()) { return; } final byte[] buffer = input.getBuffer(); final String decision = kryo.readObject(input, String.class); final Long snapShotId = kryo.readObject(input, Long.class); final List writeSet = kryo.readObject(input, ArrayList.class); final ArrayList<IOperation> localWriteSet; try { localWriteSet = (ArrayList<IOperation>) writeSet; } catch (ClassCastException e) { Log.getLogger().warn("Couldn't convert received signature message.", e); return; } Log.getLogger().info("Server: " + id + " Received message to sign with snapShotId: " + snapShotId + " of Server " + messageContext.getSender() + " and decision: " + decision + " and a writeSet of the length of: " + localWriteSet.size()); final int messageLength = kryo.readObject(input, Integer.class); final int signatureLength = kryo.readObject(input, Integer.class); final byte[] signature = input.readBytes(signatureLength); //Not required anymore. 
input.close(); final RSAKeyLoader rsaLoader = new RSAKeyLoader(messageContext.getSender(), GLOBAL_CONFIG_LOCATION, false); final PublicKey key; try { key = rsaLoader.loadPublicKey(); } catch (Exception e) { Log.getLogger().warn("Unable to load public key on server " + id + " sent by server " + messageContext.getSender(), e); return; } final byte[] message = new byte[messageLength]; System.arraycopy(buffer, 0, message, 0, messageLength); boolean signatureMatches = TOMUtil.verifySignature(key, message, signature); if (signatureMatches) { storeSignedMessage(snapShotId, signature, messageContext, decision, message, writeSet); return; } Log.getLogger().warn("Signature doesn't match of message, throwing message away."); } @Override public byte[] appExecuteUnordered(final byte[] bytes, final MessageContext messageContext) { Log.getLogger().info("Received unordered message at global replica"); final KryoPool pool = new KryoPool.Builder(getFactory()).softReferences().build(); final Kryo kryo = pool.borrow(); final Input input = new Input(bytes); final String messageType = kryo.readObject(input, String.class); Output output = new Output(1, 804800); switch (messageType) { case Constants.READ_MESSAGE: Log.getLogger().info("Received Node read message"); try { kryo.writeObject(output, Constants.READ_MESSAGE); output = handleNodeRead(input, messageContext, kryo, output); } catch (Throwable t) { Log.getLogger().error("Error on " + Constants.READ_MESSAGE + ", returning empty read", t); output = makeEmptyReadResponse(Constants.READ_MESSAGE, kryo); } break; case Constants.RELATIONSHIP_READ_MESSAGE: Log.getLogger().info("Received Relationship read message"); try { kryo.writeObject(output, Constants.READ_MESSAGE); output = handleRelationshipRead(input, kryo, output); } catch (Throwable t) { Log.getLogger().error("Error on " + Constants.RELATIONSHIP_READ_MESSAGE + ", returning empty read", t); output = makeEmptyReadResponse(Constants.RELATIONSHIP_READ_MESSAGE, kryo); } break; case Constants.SIGNATURE_MESSAGE: if (wrapper.getLocalCLuster() != null) { handleSignatureMessage(input, messageContext, kryo); } break; case Constants.REGISTER_GLOBALLY_MESSAGE: Log.getLogger().info("Received register globally message"); output.close(); input.close(); pool.release(kryo); return handleRegisteringSlave(input, kryo); case Constants.REGISTER_GLOBALLY_CHECK: Log.getLogger().info("Received globally check message"); output.close(); input.close(); pool.release(kryo); return handleGlobalRegistryCheck(input, kryo); case Constants.COMMIT: Log.getLogger().info("Received commit message"); output.close(); byte[] result = handleReadOnlyCommit(input, kryo); input.close(); pool.release(kryo); Log.getLogger().info("Return it to client, size: " + result.length); return result; default: Log.getLogger().warn("Incorrect operation sent unordered to the server"); break; } byte[] returnValue = output.getBuffer(); Log.getLogger().info("Return it to client, size: " + returnValue.length); input.close(); output.close(); pool.release(kryo); return returnValue; } private byte[] handleReadOnlyCommit(final Input input, final Kryo kryo) { final Long timeStamp = kryo.readObject(input, Long.class); return executeReadOnlyCommit(kryo, input, timeStamp); } /** * Handle the check for the global registering of a slave. * * @param input the incoming message. * @param kryo the kryo instance. 
* @return the reply */ private byte[] handleGlobalRegistryCheck(final Input input, final Kryo kryo) { final Output output = new Output(512); kryo.writeObject(output, Constants.REGISTER_GLOBALLY_CHECK); boolean decision = false; if (!wrapper.getLocalCLuster().isPrimary() || wrapper.getLocalCLuster().askIfIsPrimary(kryo.readObject(input, Integer.class), kryo.readObject(input, Integer.class), kryo)) { decision = true; } kryo.writeObject(output, decision); final byte[] result = output.getBuffer(); output.close(); input.close(); return result; } /** * This message comes from the local cluster. * Will respond true if it can register. * Message which handles slaves registering at the global cluster. * * @param kryo the kryo instance. * @param input the message. * @return the message in bytes. */ @SuppressWarnings("squid:S2095") private byte[] handleRegisteringSlave(final Input input, final Kryo kryo) { final int localClusterID = kryo.readObject(input, Integer.class); final int newPrimary = kryo.readObject(input, Integer.class); final int oldPrimary = kryo.readObject(input, Integer.class); final ServiceProxy localProxy = new ServiceProxy(1000 + oldPrimary, "local" + localClusterID); final Output output = new Output(512); kryo.writeObject(output, Constants.REGISTER_GLOBALLY_CHECK); kryo.writeObject(output, newPrimary); byte[] result = localProxy.invokeUnordered(output.getBuffer()); final Output nextOutput = new Output(512); kryo.writeObject(output, Constants.REGISTER_GLOBALLY_REPLY); final Input answer = new Input(result); if (Constants.REGISTER_GLOBALLY_REPLY.equals(answer.readString())) { kryo.writeObject(nextOutput, answer.readBoolean()); } final byte[] returnBuffer = nextOutput.getBuffer(); nextOutput.close(); answer.close(); localProxy.close(); output.close(); return returnBuffer; //remove currentView and edit system.config //If alright send the result to all remaining global clusters so that they update themselves. } /** * Store the signed message on the server. * If n-f messages arrived send it to client. * * @param snapShotId the snapShotId as key. * @param signature the signature * @param context the message context. * @param decision the decision. * @param message the message. */ private void storeSignedMessage( final Long snapShotId, final byte[] signature, @NotNull final MessageContext context, final String decision, final byte[] message, final List<IOperation> writeSet) { final SignatureStorage signatureStorage; if (signatureStorageCache.getIfPresent(snapShotId) == null) { signatureStorage = new SignatureStorage(super.getReplica().getReplicaContext().getStaticConfiguration().getN() - 1, message, decision); signatureStorageCache.put(snapShotId, signatureStorage); Log.getLogger().info("Replica: " + id + " did not have the transaction prepared. 
Might be slow or corrupted, message size stored: " + message.length); } else { signatureStorage = signatureStorageCache.getIfPresent(snapShotId); if (!signatureStorage.getDecision().equals(decision)) { Log.getLogger().error("Different decision"); } } if (signatureStorage.getMessage().length != message.length) { Log.getLogger().error("Message in signatureStorage: " + signatureStorage.getMessage().length + " message of writing server " + message.length); final KryoPool pool = new KryoPool.Builder(super.getFactory()).softReferences().build(); final Kryo kryo = pool.borrow(); Log.getLogger().warn("Start logging"); final Input input = new Input(new ByteBufferInput(signatureStorage.getMessage())); kryo.readObject(input, String.class); kryo.readObject(input, String.class); final Long snapShotId2 = kryo.readObject(input, Long.class); Log.getLogger().warn("SnapshotId local: " + snapShotId2 + " snapshotId received: " + snapShotId); final List lWriteSet = kryo.readObject(input, ArrayList.class); Log.getLogger().warn("WriteSet received: " + writeSet.size()); for (IOperation op : writeSet) { if (op instanceof UpdateOperation) { Log.getLogger().warn("Update"); Log.getLogger().warn(((UpdateOperation) op).getKey().toString()); Log.getLogger().warn(((UpdateOperation) op).getValue().toString()); } else if (op instanceof DeleteOperation) { Log.getLogger().warn("Delete"); Log.getLogger().warn(((DeleteOperation) op).getObject().toString()); } else if (op instanceof CreateOperation) { Log.getLogger().warn("Create"); Log.getLogger().warn(((CreateOperation) op).getObject().toString()); } } final ArrayList<IOperation> localWriteSet2; input.close(); try { localWriteSet2 = (ArrayList<IOperation>) lWriteSet; } catch (ClassCastException e) { Log.getLogger().warn("Couldn't convert received signature message.", e); return; } Log.getLogger().warn("WriteSet local: " + localWriteSet2.size()); for (IOperation op : localWriteSet2) { if (op instanceof UpdateOperation) { Log.getLogger().warn("Update"); Log.getLogger().warn(((UpdateOperation) op).getKey().toString()); Log.getLogger().warn(((UpdateOperation) op).getValue().toString()); } else if (op instanceof DeleteOperation) { Log.getLogger().warn("Delete"); Log.getLogger().warn(((DeleteOperation) op).getObject().toString()); } else if (op instanceof CreateOperation) { Log.getLogger().warn("Create"); Log.getLogger().warn(((CreateOperation) op).getObject().toString()); } } Log.getLogger().warn("End logging"); } if (!decision.equals(signatureStorage.getDecision())) { Log.getLogger().warn("Replica: " + id + " did receive a different decision of replica: " + context.getSender() + ". Might be corrupted."); return; } signatureStorage.addSignatures(context.getSender(), signature); Log.getLogger().info("Adding signature to signatureStorage, has: " + signatureStorage.getSignatures().size() + " is: " + signatureStorage.isProcessed() + " by: " + context.getSender()); if (signatureStorage.hasEnough()) { Log.getLogger().info("Sending update to slave signed by all members: " + snapShotId); if (signatureStorage.isProcessed()) { updateSlave(signatureStorage); signatureStorage.setDistributed(); } if (signatureStorage.hasAll() && signatureStorage.isDistributed()) { signatureStorageCache.invalidate(snapShotId); return; } } signatureStorageCache.put(snapShotId, signatureStorage); } /** * Update the slave with a transaction. * * @param signatureStorage the signatureStorage with message and signatures.. 
*/ private void updateSlave(final SignatureStorage signatureStorage) { if (this.wrapper.getLocalCLuster() != null) { this.wrapper.getLocalCLuster().propagateUpdate(new SignatureStorage(signatureStorage)); } } /** * Invoke a message to the global cluster. * * @param input the input object. * @return the response. */ public Output invokeGlobally(final Input input) { return new Output(proxy.invokeOrdered(input.getBuffer())); } /** * Closes the global cluster and his code. */ public void close() { super.terminate(); proxy.close(); } }
more logging
src/main/java/main/java/com/bag/server/GlobalClusterSlave.java
more logging
<ide><path>rc/main/java/main/java/com/bag/server/GlobalClusterSlave.java <ide> <ide> if(!localWriteSet.isEmpty()) <ide> { <del> Log.getLogger().warn("Aborting!!!"); <add> Log.getLogger().warn("Aborting of: " + getGlobalSnapshotId()); <ide> for(IOperation operation: localWriteSet) <ide> { <ide> Log.getLogger().warn(operation.toString()); <ide> <ide> if (!localWriteSet.isEmpty()) <ide> { <del> Log.getLogger().warn("Comitting!!!"); <add> Log.getLogger().warn("Comitting: " + getGlobalSnapshotId()); <ide> for(IOperation operation: localWriteSet) <ide> { <ide> Log.getLogger().warn(operation.toString());
Java
mit
7fdb93371c862110133abda23c59b89c45171be1
0
dom96/BrokenBonez
package com.dragonfruitstudios.brokenbonez.Math.Collisions; import android.graphics.*; import android.graphics.Rect; import com.dragonfruitstudios.brokenbonez.Game.Drawable; import com.dragonfruitstudios.brokenbonez.Game.GameView; import com.dragonfruitstudios.brokenbonez.Game.Graphics; import com.dragonfruitstudios.brokenbonez.Math.VectorF; import java.util.ArrayList; import java.util.Arrays; /** * This class implements an irregular shape composed of multiple line segments. It also implements * collision detection between this shape and other shapes. */ public class Polygon extends Intersector implements Drawable { private ArrayList<Line> lines; private VectorF size; private RectF rect; private android.graphics.RectF calcRect() { // TODO: This code sucks. Clean it up. But make sure tests pass. float minX = Float.MAX_VALUE; float maxX = -Float.MAX_VALUE; float minY = Float.MAX_VALUE; float maxY = -Float.MAX_VALUE; for (Line l : lines) { minX = Math.min(minX, l.getStart().x); maxX = Math.max(maxX, l.getStart().x); minY = Math.min(minY, l.getStart().y); maxY = Math.max(maxY, l.getStart().y); minX = Math.min(minX, l.getFinish().x); maxX = Math.max(maxX, l.getFinish().x); minY = Math.min(minY, l.getFinish().y); maxY = Math.max(maxY, l.getFinish().y); } return new RectF(minX, minY, maxX, maxY); } private VectorF calcSize() { return new VectorF(rect.right - rect.left, rect.bottom - rect.top); } protected Polygon() { this.lines = new ArrayList<Line>(); } public Polygon(Line[] lines) { this.lines = new ArrayList<Line>(Arrays.asList(lines)); rect = calcRect(); size = calcSize(); } public Polygon(ArrayList<Line> lines) { this.lines = lines; rect = calcRect(); size = calcSize(); } protected void addVertices(VectorF[] vertices) { for (int i = 0; i < vertices.length-1; i++) { lines.add(new Line(vertices[i].copy(), vertices[i+1].copy())); } lines.add(new Line(vertices[vertices.length - 1], vertices[0].copy())); recalculateBounds(); } /** * Checks if the specified shape collides with this Polygon. * @return A Manifold containing information about the collision. */ @Override public Manifold.Collection collisionTest(Intersector shape) { if (shape instanceof Circle) { return ((Circle)shape).collisionTest(this); } else if (shape instanceof Polygon) { return ((Polygon)shape).collisionTestWithPolygon(this); } return collisionNotImplemented(shape); } private Manifold.Collection collisionTestWithLine(Line line) { Manifold.Collection result = this.collisionTest(line.getStart()); result.addAll(this.collisionTest(line.getFinish())); return result; } public Manifold.Collection collisionTestWithPolygon(Polygon shape) { // To determine whether two Polygon's intersect we simply check each vertex inside each // Polygon and see if it is inside the other Polygon using `collisionTest`. // TODO: There is likely a more efficient way of testing whether two Polygons intersect. // TODO: Test this method! Manifold.Collection result = new Manifold.Collection(); for (Line l : shape.lines) { result.addAll(this.collisionTestWithLine(l)); } for (Line l : this.lines) { result.addAll(shape.collisionTestWithLine(l)); } return result; } /** * Checks whether `point` collides with this Polygon (when `point` is inside the Polygon then * True is also returned). * * Implementation details: * * This implementation uses two methods to check whether `point` collides with this * polygon. The first is a simple check to determine if `point` lies on any of the * Polygon's edges. 
The second is the use of the Even-odd rule, which allows us to check * whether `point` is inside the Polygon. It does so by drawing a ray from `point` to * infinity in an arbitrary direction and counting the number of times the ray crosses with * the Polygon's edges. If the number is odd then the point is inside. */ public Manifold.Collection collisionTest(VectorF point) { // TODO: This implementation may be too slow when more complex polygons are involved. Manifold.Collection result = new Manifold.Collection(); boolean odd = false; for (Line l : lines) { // Check if `point` is on the line segment `l`. Manifold res = l.collisionTest(point); if (res.hasCollided()) { result.add(res); } // Code carefully translated from the C code available here: // https://www.ecse.rpi.edu/Homepages/wrf/Research/Short_Notes/pnpoly.html // I reused the for loop for efficiency and it seems to be working well. // The following code draws a ray from `point` to infinity and checks if it // collides with `l`. if (l.getStart().y > point.y != l.getFinish().y > point.y) { if (point.x < (l.getFinish().x - l.getStart().x) * (point.y - l.getStart().y) / (l.getFinish().y - l.getStart().y) + l.getStart().x) { odd = !odd; } } } // Check if ray from `point` to infinity collided with an odd number of lines. // If so, this suggests that the point is inside the Polygon (See Even-odd rule). if (odd && !result.hasCollisions()) { // Need to find the normal and penetration depth. // Do this by finding the line closest to `point`. Line closestLine = lines.get(0); float closestDist = lines.get(0).distanceSquared(point); for (int i = 1; i < lines.size(); i++) { float dist = lines.get(i).distanceSquared(point); if (dist < closestDist) { closestLine = lines.get(i); closestDist = dist; } } // TODO: Calculate normal correctly. The following approximation works rather // TODO: well though. result.add(new Manifold(new VectorF(0, 1), (float)Math.sqrt(closestDist), true)); } return result; } /** * Determines whether `point` collides with this polygon's edges. * * Warning: This only checks whether `point` collides with the edges of this polygon, i.e. * if it's inside it `false` may be returned. */ public boolean collidesWith(VectorF point) { for (Line l : lines) { if (l.collidesWith(point)) { return true; } } return false; } public float distanceSquared(VectorF point) { float result = -1; for (Line l : lines) { float temp = l.distanceSquared(point); if (result == -1 || temp < result) { result = temp; } } return result; } public ArrayList<Line> getLines() { return lines; } public void setLines(ArrayList<Line> lines) { this.lines = lines; } public Intersector copy() { return new Polygon(new ArrayList<Line>(lines)); } public VectorF getPos() { return lines.get(0).getPos(); } public VectorF getSize() { return size; } public RectF getRect() { return rect; } public void recalculateBounds() { rect = calcRect(); size = calcSize(); } /** This method is used to show where the polygon is on the screen, for debugging purposes only. */ public void draw(GameView view) { if (Graphics.drawDebugInfo) { for (Line l : lines) { l.draw(view); } // Draw the size of the polygon somewhere beside it. view.drawText(getSize().x + "x" + getSize().y, lines.get(0).getPos().x - 5, lines.get(0).getPos().y - 10, Color.RED); } } }
BrokenBonez/app/src/main/java/com/dragonfruitstudios/brokenbonez/Math/Collisions/Polygon.java
package com.dragonfruitstudios.brokenbonez.Math.Collisions; import android.graphics.*; import android.graphics.Rect; import com.dragonfruitstudios.brokenbonez.Game.Drawable; import com.dragonfruitstudios.brokenbonez.Game.GameView; import com.dragonfruitstudios.brokenbonez.Game.Graphics; import com.dragonfruitstudios.brokenbonez.Math.VectorF; import java.util.ArrayList; import java.util.Arrays; /** * This class implements an irregular shape composed of multiple line segments. It also implements * collision detection between this shape and other shapes. */ public class Polygon extends Intersector implements Drawable { private ArrayList<Line> lines; private VectorF size; private RectF rect; private android.graphics.RectF calcRect() { // TODO: This code sucks. Clean it up. But make sure tests pass. float minX = Float.MAX_VALUE; float maxX = -Float.MAX_VALUE; float minY = Float.MAX_VALUE; float maxY = -Float.MAX_VALUE; for (Line l : lines) { minX = Math.min(minX, l.getStart().x); maxX = Math.max(maxX, l.getStart().x); minY = Math.min(minY, l.getStart().y); maxY = Math.max(maxY, l.getStart().y); minX = Math.min(minX, l.getFinish().x); maxX = Math.max(maxX, l.getFinish().x); minY = Math.min(minY, l.getFinish().y); maxY = Math.max(maxY, l.getFinish().y); } return new RectF(minX, minY, maxX, maxY); } private VectorF calcSize() { return new VectorF(rect.right - rect.left, rect.bottom - rect.top); } protected Polygon() { this.lines = new ArrayList<Line>(); } public Polygon(Line[] lines) { this.lines = new ArrayList<Line>(Arrays.asList(lines)); rect = calcRect(); size = calcSize(); } public Polygon(ArrayList<Line> lines) { this.lines = lines; rect = calcRect(); size = calcSize(); } protected void addVertices(VectorF[] vertices) { for (int i = 0; i < vertices.length-1; i++) { lines.add(new Line(vertices[i].copy(), vertices[i+1].copy())); } lines.add(new Line(vertices[vertices.length - 1], vertices[0].copy())); } /** * Checks if the specified shape collides with this Polygon. * @return A Manifold containing information about the collision. */ @Override public Manifold.Collection collisionTest(Intersector shape) { if (shape instanceof Circle) { return ((Circle)shape).collisionTest(this); } else if (shape instanceof Polygon) { return ((Polygon)shape).collisionTestWithPolygon(this); } return collisionNotImplemented(shape); } private Manifold.Collection collisionTestWithLine(Line line) { Manifold.Collection result = this.collisionTest(line.getStart()); result.addAll(this.collisionTest(line.getFinish())); return result; } public Manifold.Collection collisionTestWithPolygon(Polygon shape) { // To determine whether two Polygon's intersect we simply check each vertex inside each // Polygon and see if it is inside the other Polygon using `collisionTest`. // TODO: There is likely a more efficient way of testing whether two Polygons intersect. // TODO: Test this method! Manifold.Collection result = new Manifold.Collection(); for (Line l : shape.lines) { result.addAll(this.collisionTestWithLine(l)); } for (Line l : this.lines) { result.addAll(shape.collisionTestWithLine(l)); } return result; } /** * Checks whether `point` collides with this Polygon (when `point` is inside the Polygon then * True is also returned). * * Implementation details: * * This implementation uses two methods to check whether `point` collides with this * polygon. The first is a simple check to determine if `point` lies on any of the * Polygon's edges. 
The second is the use of the Even-odd rule, which allows us to check * whether `point` is inside the Polygon. It does so by drawing a ray from `point` to * infinity in an arbitrary direction and counting the number of times the ray crosses with * the Polygon's edges. If the number is odd then the point is inside. */ public Manifold.Collection collisionTest(VectorF point) { // TODO: This implementation may be too slow when more complex polygons are involved. Manifold.Collection result = new Manifold.Collection(); boolean odd = false; for (Line l : lines) { // Check if `point` is on the line segment `l`. Manifold res = l.collisionTest(point); if (res.hasCollided()) { result.add(res); } // Code carefully translated from the C code available here: // https://www.ecse.rpi.edu/Homepages/wrf/Research/Short_Notes/pnpoly.html // I reused the for loop for efficiency and it seems to be working well. // The following code draws a ray from `point` to infinity and checks if it // collides with `l`. if (l.getStart().y > point.y != l.getFinish().y > point.y) { if (point.x < (l.getFinish().x - l.getStart().x) * (point.y - l.getStart().y) / (l.getFinish().y - l.getStart().y) + l.getStart().x) { odd = !odd; } } } // Check if ray from `point` to infinity collided with an odd number of lines. // If so, this suggests that the point is inside the Polygon (See Even-odd rule). if (odd && !result.hasCollisions()) { // Need to find the normal and penetration depth. // Do this by finding the line closest to `point`. Line closestLine = lines.get(0); float closestDist = lines.get(0).distanceSquared(point); for (int i = 1; i < lines.size(); i++) { float dist = lines.get(i).distanceSquared(point); if (dist < closestDist) { closestLine = lines.get(i); closestDist = dist; } } // TODO: Calculate normal correctly. The following approximation works rather // TODO: well though. result.add(new Manifold(new VectorF(0, 1), (float)Math.sqrt(closestDist), true)); } return result; } /** * Determines whether `point` collides with this polygon's edges. * * Warning: This only checks whether `point` collides with the edges of this polygon, i.e. * if it's inside it `false` may be returned. */ public boolean collidesWith(VectorF point) { for (Line l : lines) { if (l.collidesWith(point)) { return true; } } return false; } public float distanceSquared(VectorF point) { float result = -1; for (Line l : lines) { float temp = l.distanceSquared(point); if (result == -1 || temp < result) { result = temp; } } return result; } public ArrayList<Line> getLines() { return lines; } public void setLines(ArrayList<Line> lines) { this.lines = lines; } public Intersector copy() { return new Polygon(new ArrayList<Line>(lines)); } public VectorF getPos() { return lines.get(0).getPos(); } public VectorF getSize() { return size; } public RectF getRect() { return rect; } public void recalculateBounds() { rect = calcRect(); size = calcSize(); } /** This method is used to show where the polygon is on the screen, for debugging purposes only. */ public void draw(GameView view) { if (Graphics.drawDebugInfo) { for (Line l : lines) { l.draw(view); } // Draw the size of the polygon somewhere beside it. view.drawText(getSize().x + "x" + getSize().y, lines.get(0).getPos().x - 5, lines.get(0).getPos().y - 10, Color.RED); } } }
RecalculateBounds on addVertices call.
BrokenBonez/app/src/main/java/com/dragonfruitstudios/brokenbonez/Math/Collisions/Polygon.java
RecalculateBounds on addVertices call.
<ide><path>rokenBonez/app/src/main/java/com/dragonfruitstudios/brokenbonez/Math/Collisions/Polygon.java <ide> lines.add(new Line(vertices[i].copy(), vertices[i+1].copy())); <ide> } <ide> lines.add(new Line(vertices[vertices.length - 1], vertices[0].copy())); <add> recalculateBounds(); <ide> } <ide> <ide> /**
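The Polygon.collisionTest Javadoc and code above describe the even-odd (ray casting) rule: draw a ray from the query point to infinity and count how many polygon edges it crosses; an odd count means the point is inside. A minimal, self-contained sketch of that rule, assuming plain float arrays in place of the game's VectorF/Line/Manifold types (the class and method names below are invented for illustration, not part of BrokenBonez):

// Minimal sketch of the even-odd (ray casting) rule described above.
// A horizontal ray is cast from the query point towards +infinity; the point
// is inside the polygon if the ray crosses an odd number of edges.
public final class PointInPolygonSketch {
    /**
     * @param xs x coordinates of the polygon vertices, in order
     * @param ys y coordinates of the polygon vertices, in order
     * @param px x coordinate of the query point
     * @param py y coordinate of the query point
     * @return true if (px, py) lies inside the polygon
     */
    public static boolean contains(float[] xs, float[] ys, float px, float py) {
        boolean odd = false;
        int n = xs.length;
        for (int i = 0, j = n - 1; i < n; j = i++) {
            // Edge runs from vertex j to vertex i. Count a crossing only when the
            // edge straddles the horizontal line y = py and the intersection point
            // lies to the right of (px, py) - the same test as the pnpoly code
            // cited in the Polygon class.
            if ((ys[i] > py) != (ys[j] > py)
                    && px < (xs[j] - xs[i]) * (py - ys[i]) / (ys[j] - ys[i]) + xs[i]) {
                odd = !odd;
            }
        }
        return odd;
    }

    public static void main(String[] args) {
        float[] xs = {0f, 4f, 4f, 0f};
        float[] ys = {0f, 0f, 4f, 4f};
        System.out.println(contains(xs, ys, 2f, 2f)); // true  (inside the square)
        System.out.println(contains(xs, ys, 5f, 2f)); // false (outside)
    }
}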
JavaScript
mit
61ba76fa608871df1038c8d766d621f133e2412a
0
nodeca/nodeca.core,nodeca/nodeca.core
/** * nodeca.io * * This module provides realtime communication methods for nodeca/nlib based * applications. **/ //= depend_on nodeca //= require faye-browser /*global window, $, _, Faye, nodeca*/ (function () { 'use strict'; var // registered events events = {}, // underlying bayeux client bayeux = null, // whenever transport is up or not is_connected = false, // api3 related (used by apiTree() send/receive calls) properies api3 = { req_channel: '/x/api3-req/' + window.REALTIME_ID, res_channel: '/x/api3-res/' + window.REALTIME_ID, callbacks: {}, last_msg_id: 0 }; // exported IO object var io = nodeca.io = {}; // // Errors // io.ENOCONN = 'IO_ENOCONN'; io.ETIMEOUT = 'IO_ETIMEOUT'; io.EWRONGVER = 'IO_EWRONGVER'; // error constructor function ioerr(code, message) { var err = new Error(message); err.code = code; return err; } // // Events // // executes all handlers registered for given `event` function emit(event, args) { _.each(events[event] || [], function (handler) { handler.apply(null, args); }); } /** * nodeca.io.on(event, handler) -> Void * - event (String) * - handler (Function) * * Registers `handler` for an `event`. * * * ##### Known events * * - `api3:version-mismatch(versions)` **/ io.on = function on(event, handler) { if (!events[event]) { events[event] = []; } events[event].push(handler); }; /** * nodeca.io.off(event[, handler]) -> Void * - event (String) * - handler (Function) * * Unsubscribes `handler` (or all handlers) from specified `event`. * * * ##### See also * * - [nodeca.io.on] **/ io.off = function off(event, handler) { events[event] = (!handler) ? [] : _.without(events[event], handler); }; // // Main API // function bayeux_call(name, args) { var result = bayeux[name].apply(bayeux, args); // // provide jQuery.Defered style methods // // FAYE DOCUMENTATION: // // Bear in mind that ‘success’ here just means the server received and // routed the message successfully, not that it has been received by all // other clients. result.done = _.bind(function (fn) { this.callback(fn); return this; }, result); // FAYE DOCUMENTATION: // // An error means the server found a problem processing the message. // Network errors are not covered by this. 
result.fail = _.bind(function (fn) { this.errback(fn); return this; }, result); return result; } /** * nodeca.io.subscribe(channel, handler) -> Object **/ io.subscribe = function subscribe(channel, handler) { return bayeux_call('subscribe', [channel, handler]); }; /** * nodeca.io.unsubscribe(channel[, handler]) -> Object **/ io.unsubscribe = function unsubscribe(channel, handler) { return bayeux_call('unsubscribe', [channel, handler]); }; /** * nodeca.io.publish(channel, message) -> Object **/ io.publish = function publish(channel, message) { return bayeux_call('publish', [channel, message]); }; /** * nodeca.io.apiTree(name, params, options, callback) -> Void * nodeca.io.apiTree(name, params[, callback]) -> Void * nodeca.io.apiTree(name, callback) -> Void **/ io.apiTree = function apiTree(name, params, options, callback) { var timeout, id = api3.last_msg_id++, data = {id: id}; // Scenario: rpc(name, callback); if (_.isFunction(params)) { callback = params; params = options = {}; } // Scenario: rpc(name, params[, callback]); if (_.isFunction(options)) { callback = options; options = {}; } // fill in defaults options = options || {}; callback = callback || $.noop; // // ERROR HANDLING // // - when there's no connection (or client is `connecting`), we execute // callback with `ENOCONN` error imediately // - when connection lost during waiting for server to send a message into // response channel, we execute calback with `ECONNGONE` error // - when server didn't received published request message within 30 seconds // we execute callback with `ETIMEOUT` error // // check if there an active connection if (!is_connected) { callback(ioerr(io.ENOCONN, 'No connection to the server (RT).')); return; } // fill in message data.msg = { version: nodeca.runtime.version, method: name, params: params }; // stop timer function stop_timer() { clearTimeout(timeout); // stop timeout counter timeout = null; // mark timeout as "removed" } // simple error handler function handle_error(err) { stop_timer(); delete api3.callbacks[id]; callback(err); } // handle transport down during request error function handle_transport_down() { // mimics `once()` event listener bayeux.unbind('transport:down', handle_transport_down); handle_error(ioerr(io.ECONNGONE, 'Server gone. 
(RT)')); } bayeux.bind('transport:down', handle_transport_down); // store callback for the response api3.callbacks[id] = function (msg) { stop_timer(); bayeux.unbind('transport:down', handle_transport_down); if (msg.version !== nodeca.runtime.version) { // emit version mismatch error emit('api3:version-mismatch', { client: nodeca.runtime.version, server: msg.version }); callback(ioerr(io.EWRONGVER, 'Client version does not match server.')); return; } // run actual callback callback(msg.err, msg.result); }; // wait for successfull message delivery 10 seconds timeout = setTimeout(function () { handle_error(ioerr(io.ETIMEOUT, 'Timeout ' + name + ' execution.')); }, 10000); // send request bayeux_call('publish', [api3.req_channel, data]) // see bayeux_call info for details on fail/done .fail(handle_error) .done(stop_timer); }; // // Initialization API // /** * nodeca.io.auth(callback) -> Void **/ io.auth = function (callback) { // Not implemented yet callback(null); }; // responses listener function handle_api3_response(data) { var callback = api3.callbacks[data.id]; if (!callback) { // unknown response id return; } delete api3.callbacks[data.id]; callback(data.msg); } /** * nodeca.io.init() -> Void **/ io.init = function () { bayeux = new Faye.Client('/faye'); if ('development' === nodeca.runtime.env) { // export some internals for debugging window.fontello_bayeux = bayeux; } // // once connected, client.getState() always returns 'CONNECTED' regardless // to the real state, so instead of relying on this state we use our own // bayeux.bind('transport:up', function () { is_connected = true; emit('connected'); }); bayeux.bind('transport:down', function () { is_connected = false; emit('disconnected'); }); // // faye handles reconnection on it's own: // https://groups.google.com/d/msg/faye-users/NJPd3v98zjY/hyGpoat5Of0J // bayeux.subscribe(api3.res_channel, handle_api3_response); }; }());
assets/js/nodeca/io.js
/** * nodeca.io * * This module provides realtime communication methods for nodeca/nlib based * applications. **/ /*global window, $, _, Faye, nodeca*/ //= depend_on nodeca //= require faye-browser (function () { 'use strict'; var // registered events events = {}, // underlying bayeux client bayeux = null, // whenever transport is up or not is_connected = false, // api3 related (used by apiTree() send/receive calls) properies api3 = { req_channel: '/x/api3-req/' + window.REALTIME_ID, res_channel: '/x/api3-res/' + window.REALTIME_ID, callbacks: {}, last_msg_id: 0 }; // exported IO object var io = nodeca.io = {}; // // Errors // io.ENOCONN = 'IO_ENOCONN'; io.ETIMEOUT = 'IO_ETIMEOUT'; io.EWRONGVER = 'IO_EWRONGVER'; // error constructor function ioerr(code, message) { var err = new Error(message); err.code = code; return err; } // // Events // // executes all handlers registered for given `event` function emit(event, args) { _.each(events[event] || [], function (handler) { handler.apply(null, args); }); } /** * nodeca.io.on(event, handler) -> Void * - event (String) * - handler (Function) * * Registers `handler` for an `event`. * * * ##### Known events * * - `api3:version-mismatch(versions)` **/ io.on = function on(event, handler) { if (!events[event]) { events[event] = []; } events[event].push(handler); }; /** * nodeca.io.off(event[, handler]) -> Void * - event (String) * - handler (Function) * * Unsubscribes `handler` (or all handlers) from specified `event`. * * * ##### See also * * - [nodeca.io.on] **/ io.off = function off(event, handler) { events[event] = (!handler) ? [] : _.without(events[event], handler); }; // // Main API // function bayeux_call(name, args) { var result = bayeux[name].apply(bayeux, args); // // provide jQuery.Defered style methods // // FAYE DOCUMENTATION: // // Bear in mind that ‘success’ here just means the server received and // routed the message successfully, not that it has been received by all // other clients. result.done = _.bind(function (fn) { this.callback(fn); return this; }, result); // FAYE DOCUMENTATION: // // An error means the server found a problem processing the message. // Network errors are not covered by this. 
result.fail = _.bind(function (fn) { this.errback(fn); return this; }, result); return result; } /** * nodeca.io.subscribe(channel, handler) -> Object **/ io.subscribe = function subscribe(channel, handler) { return bayeux_call('subscribe', [channel, handler]); }; /** * nodeca.io.unsubscribe(channel[, handler]) -> Object **/ io.unsubscribe = function unsubscribe(channel, handler) { return bayeux_call('unsubscribe', [channel, handler]); }; /** * nodeca.io.publish(channel, message) -> Object **/ io.publish = function publish(channel, message) { return bayeux_call('publish', [channel, message]); }; /** * nodeca.io.apiTree(name, params[, options][, callback]) -> Void **/ io.apiTree = function apiTree(name, params, options, callback) { var timeout, id = api3.last_msg_id++, data = {id: id}; // Scenario: rpc(name, params, callback); if (_.isFunction(options)) { callback = options; options = {}; } // fill in defaults options = options || {}; callback = callback || $.noop; // // ERROR HANDLING // // - when there's no connection (or client is `connecting`), we execute // callback with `ENOCONN` error imediately // - when connection lost during waiting for server to send a message into // response channel, we execute calback with `ECONNGONE` error // - when server didn't received published request message within 30 seconds // we execute callback with `ETIMEOUT` error // // check if there an active connection if (!is_connected) { callback(ioerr(io.ENOCONN, 'No connection to the server (RT).')); return; } // fill in message data.msg = { version: nodeca.runtime.version, method: name, params: params }; // stop timer function stop_timer() { clearTimeout(timeout); // stop timeout counter timeout = null; // mark timeout as "removed" } // simple error handler function handle_error(err) { stop_timer(); delete api3.callbacks[id]; callback(err); } // handle transport down during request error function handle_transport_down() { // mimics `once()` event listener bayeux.unbind('transport:down', handle_transport_down); handle_error(ioerr(io.ECONNGONE, 'Server gone. 
(RT)')); } bayeux.bind('transport:down', handle_transport_down); // store callback for the response api3.callbacks[id] = function (msg) { stop_timer(); bayeux.unbind('transport:down', handle_transport_down); if (msg.version !== nodeca.runtime.version) { // emit version mismatch error emit('api3:version-mismatch', { client: nodeca.runtime.version, server: msg.version }); callback(ioerr(io.EWRONGVER, 'Client version does not match server.')); return; } // run actual callback callback(msg.err, msg.result); }; // wait for successfull message delivery 10 seconds timeout = setTimeout(function () { handle_error(ioerr(io.ETIMEOUT, 'Timeout ' + name + ' execution.')); }, 10000); // send request bayeux_call('publish', [api3.req_channel, data]) // see bayeux_call info for details on fail/done .fail(handle_error) .done(stop_timer); }; // // Initialization API // /** * nodeca.io.auth(callback) -> Void **/ io.auth = function (callback) { // Not implemented yet callback(null); }; // responses listener function handle_api3_response(data) { var callback = api3.callbacks[data.id]; if (!callback) { // unknown response id return; } delete api3.callbacks[data.id]; callback(data.msg); } /** * nodeca.io.init() -> Void **/ io.init = function () { bayeux = new Faye.Client('/faye'); if ('development' === nodeca.runtime.env) { // export some internals for debugging window.fontello_bayeux = bayeux; } // // once connected, client.getState() always returns 'CONNECTED' regardless // to the real state, so instead of relying on this state we use our own // bayeux.bind('transport:up', function () { is_connected = true; emit('connected'); }); bayeux.bind('transport:down', function () { is_connected = false; emit('disconnected'); }); // // faye handles reconnection on it's own: // https://groups.google.com/d/msg/faye-users/NJPd3v98zjY/hyGpoat5Of0J // bayeux.subscribe(api3.res_channel, handle_api3_response); }; }());
fix apiTree call signature
assets/js/nodeca/io.js
fix apiTree call signature
<ide><path>ssets/js/nodeca/io.js <ide> **/ <ide> <ide> <del>/*global window, $, _, Faye, nodeca*/ <del> <del> <ide> //= depend_on nodeca <ide> //= require faye-browser <add> <add> <add>/*global window, $, _, Faye, nodeca*/ <ide> <ide> <ide> (function () { <ide> <ide> <ide> /** <del> * nodeca.io.apiTree(name, params[, options][, callback]) -> Void <add> * nodeca.io.apiTree(name, params, options, callback) -> Void <add> * nodeca.io.apiTree(name, params[, callback]) -> Void <add> * nodeca.io.apiTree(name, callback) -> Void <ide> **/ <ide> io.apiTree = function apiTree(name, params, options, callback) { <ide> var timeout, id = api3.last_msg_id++, data = {id: id}; <ide> <del> // Scenario: rpc(name, params, callback); <add> // Scenario: rpc(name, callback); <add> if (_.isFunction(params)) { <add> callback = params; <add> params = options = {}; <add> } <add> <add> // Scenario: rpc(name, params[, callback]); <ide> if (_.isFunction(options)) { <ide> callback = options; <ide> options = {};
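The io.apiTree code above is JavaScript, but the correlation mechanism it implements is language-neutral: every outgoing request carries an incrementing id, the callback is parked in a map under that id, and it is completed either by a response arriving on the response channel or by a timeout, while responses with unknown ids are ignored. A rough Java sketch of that pattern follows; the transport is out of scope (the real client publishes over Faye), and RpcCorrelationSketch, send and onResponse are invented names for this illustration only.

// Sketch of the request/response correlation used by nodeca.io.apiTree above.
import java.util.Map;
import java.util.concurrent.*;
import java.util.function.BiConsumer;

public final class RpcCorrelationSketch {
    private final Map<Long, BiConsumer<Exception, Object>> pending = new ConcurrentHashMap<>();
    private final ScheduledExecutorService timer = Executors.newSingleThreadScheduledExecutor();
    private long lastMsgId = 0;

    /** Register a callback, schedule a timeout, and hand the id to the transport. */
    public synchronized long send(BiConsumer<Exception, Object> callback) {
        long id = ++lastMsgId;
        pending.put(id, callback);
        timer.schedule(() -> {
            // If nobody answered within 10 seconds, fail the pending callback.
            BiConsumer<Exception, Object> cb = pending.remove(id);
            if (cb != null) {
                cb.accept(new TimeoutException("Timeout waiting for response " + id), null);
            }
        }, 10, TimeUnit.SECONDS);
        // ...here the real client would publish {id, msg} to the request channel...
        return id;
    }

    /** Called by the response-channel subscriber; unknown ids are silently ignored. */
    public void onResponse(long id, Object result) {
        BiConsumer<Exception, Object> cb = pending.remove(id);
        if (cb != null) {
            cb.accept(null, result);
        }
    }
}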
Java
apache-2.0
051222373a0e63cf593718610ad719d0397ac172
0
play1-maven-plugin/play1-maven-plugin
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package com.google.code.play; import java.io.File; import java.io.IOException; import java.util.HashMap; import java.util.Iterator; import java.util.Map; import java.util.Set; import org.apache.maven.artifact.Artifact; import org.apache.maven.plugin.MojoExecutionException; import org.apache.maven.plugin.MojoFailureException; import org.codehaus.plexus.archiver.Archiver; import org.codehaus.plexus.archiver.ArchiverException; import org.codehaus.plexus.archiver.manager.ArchiverManager; import org.codehaus.plexus.archiver.manager.NoSuchArchiverException; /** * Package Play! application as a ZIP achive. * * @author <a href="mailto:[email protected]">Grzegorz Slowikowski</a> * @goal zip * @phase package * @requiresDependencyResolution runtime */ public class PlayZipMojo extends AbstractPlayMojo { /** * Application resources include filter * * @parameter expression="${play.zipIncludes}" default-value="app/**,conf/**,public/**,tags/**" * @since 1.0.0 */ private String zipIncludes; /** * Application resources exclude filter. * * @parameter expression="${play.zipExcludes}" default-value="" * @since 1.0.0 */ private String zipExcludes; /** * Should project dependencies ("lib" and "modules" directories) be packaged. * No include/exclude filters. * * @parameter expression="${play.zipDependencies}" default-value="false" * @since 1.0.0 */ private boolean zipDependencies; /** * To look up Archiver/UnArchiver implementations. * * @component role="org.codehaus.plexus.archiver.manager.ArchiverManager" * @required */ private ArchiverManager archiverManager; protected void internalExecute() throws MojoExecutionException, MojoFailureException, IOException { try { File baseDir = project.getBasedir(); File zipOutputDirectory = new File( project.getBuild().getDirectory() ); String zipName = project.getBuild().getFinalName(); File destFile = new File( zipOutputDirectory, zipName + ".zip" ); Archiver zipArchiver = archiverManager.getArchiver( "zip" ); zipArchiver.setDuplicateBehavior( Archiver.DUPLICATES_FAIL ); // Just in case zipArchiver.setDestFile( destFile ); getLog().debug( "Zip includes: " + zipIncludes ); getLog().debug( "Zip excludes: " + zipExcludes ); String[] includes = ( zipIncludes != null ? zipIncludes.split( "," ) : null ); String[] excludes = ( zipExcludes != null ? 
zipExcludes.split( "," ) : null ); zipArchiver.addDirectory( baseDir, includes, excludes ); if ( zipDependencies ) { Map<Artifact, String> moduleTypeArtifacts = processModuleDependencies( zipArchiver ); processJarDependencies( zipArchiver, moduleTypeArtifacts ); } zipArchiver.createArchive(); project.getArtifact().setFile( destFile ); } catch ( ArchiverException e ) { throw new MojoExecutionException( "?", e ); } catch ( NoSuchArchiverException e ) { throw new MojoExecutionException( "?", e ); } } private Map<Artifact, String> processModuleDependencies( Archiver archiver ) throws ArchiverException, NoSuchArchiverException, IOException { Map<Artifact, String> moduleTypeArtifacts = new HashMap<Artifact, String>(); Set<?> artifacts = project.getArtifacts(); for ( Iterator<?> iter = artifacts.iterator(); iter.hasNext(); ) { Artifact artifact = (Artifact) iter.next(); if ( "zip".equals( artifact.getType() ) ) { if ( "module".equals( artifact.getClassifier() ) || "module-min".equals( artifact.getClassifier() ) ) { processZipDependency( artifact, archiver, moduleTypeArtifacts ); } } else if ( "play".equals( artifact.getType() ) ) { processZipDependency( artifact, archiver, null ); // it's not necessary to add "play" type dependencies to "moduleTypeArtifacts" map } } return moduleTypeArtifacts; } private void processZipDependency( Artifact artifact, Archiver archiver, Map<Artifact, String> moduleTypeArtifacts ) throws ArchiverException, NoSuchArchiverException, IOException { // System.out.println("module: " + artifact.getGroupId() + ":" + artifact.getArtifactId()); // System.out.println( "artifact: groupId=" + artifact.getGroupId() + ":artifactId=" // + artifact.getArtifactId() + ":type=" + artifact.getType() + ":classifier=" // + artifact.getClassifier() + ":scope=" + artifact.getScope() ); File zipFile = artifact.getFile(); String moduleName = artifact.getArtifactId(); if ( moduleName.startsWith( "play-" ) ) { moduleName = moduleName.substring( "play-".length() ); } String moduleSubDir = String.format( "%s-%s", moduleName, artifact.getVersion() ); archiver.addArchivedFileSet( zipFile, "modules/" + moduleSubDir + "/" ); if ( moduleTypeArtifacts != null ) { moduleTypeArtifacts.put( artifact, moduleSubDir ); } } private void processJarDependencies( Archiver archiver, Map<Artifact, String> moduleTypeArtifacts ) throws ArchiverException, NoSuchArchiverException, IOException { Set<?> artifacts = project.getArtifacts(); for ( Iterator<?> iter = artifacts.iterator(); iter.hasNext(); ) { Artifact artifact = (Artifact) iter.next(); if ( "jar".equals( artifact.getType() ) ) { // System.out.println("jar: " + artifact.getGroupId() + ":" + artifact.getArtifactId()); File jarFile = artifact.getFile(); String libDir = "lib"; for ( Map.Entry<Artifact, String> moduleTypeArtifactEntry : moduleTypeArtifacts.entrySet() ) { Artifact moduleArtifact = moduleTypeArtifactEntry.getKey(); // System.out.println("checking module: " + moduleArtifact.getGroupId() + ":" + // moduleArtifact.getArtifactId()); if ( artifact.getGroupId().equals( moduleArtifact.getGroupId() ) && artifact.getArtifactId().equals( moduleArtifact.getArtifactId() ) ) { String moduleSubDir = moduleTypeArtifactEntry.getValue(); libDir = String.format( "modules/%s/lib", moduleSubDir ); // System.out.println("checked ok - lib is " + libDir.getCanonicalPath()); break; } } // System.out.println("jar: " + artifact.getGroupId() + ":" + artifact.getArtifactId() + " added to " + // libDir); archiver.addFile( jarFile, libDir + "/" + jarFile.getName() ); } } } } // 
TODO - add name-conflict detection for modules and jars
play-maven-plugin/src/main/java/com/google/code/play/PlayZipMojo.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package com.google.code.play; import java.io.File; import java.io.IOException; import java.util.HashMap; import java.util.Iterator; import java.util.Map; import java.util.Set; import org.apache.maven.artifact.Artifact; import org.apache.maven.plugin.MojoExecutionException; import org.apache.maven.plugin.MojoFailureException; import org.codehaus.plexus.archiver.Archiver; import org.codehaus.plexus.archiver.ArchiverException; import org.codehaus.plexus.archiver.manager.ArchiverManager; import org.codehaus.plexus.archiver.manager.NoSuchArchiverException; /** * Package Play! application as a ZIP achive. * * @author <a href="mailto:[email protected]">Grzegorz Slowikowski</a> * @goal zip * @phase package * @requiresDependencyResolution runtime */ public class PlayZipMojo extends AbstractPlayMojo { /** * Application resources include filter * * @parameter expression="${play.zipIncludes}" default-value="app/**,conf/**,public/**,tags/**" * @since 1.0.0 */ private String zipIncludes; /** * Application resources exclude filter. * * @parameter expression="${play.zipExcludes}" default-value="" * @since 1.0.0 */ private String zipExcludes; /** * Should project dependencies ("lib" and "modules" directories) be packaged. * No include/exclude filters. Use "provided" scope to exclude from packaging. * * @parameter expression="${play.zipDependencies}" default-value="false" * @since 1.0.0 */ private boolean zipDependencies; /** * To look up Archiver/UnArchiver implementations. * * @component role="org.codehaus.plexus.archiver.manager.ArchiverManager" * @required */ private ArchiverManager archiverManager; protected void internalExecute() throws MojoExecutionException, MojoFailureException, IOException { try { File baseDir = project.getBasedir(); File zipOutputDirectory = new File( project.getBuild().getDirectory() ); String zipName = project.getBuild().getFinalName(); File destFile = new File( zipOutputDirectory, zipName + ".zip" ); Archiver zipArchiver = archiverManager.getArchiver( "zip" ); zipArchiver.setDuplicateBehavior( Archiver.DUPLICATES_FAIL ); // Just in case zipArchiver.setDestFile( destFile ); getLog().debug( "Zip includes: " + zipIncludes ); getLog().debug( "Zip excludes: " + zipExcludes ); String[] includes = ( zipIncludes != null ? zipIncludes.split( "," ) : null ); String[] excludes = ( zipExcludes != null ? 
zipExcludes.split( "," ) : null ); zipArchiver.addDirectory( baseDir, includes, excludes ); if ( zipDependencies ) { Map<Artifact, String> moduleTypeArtifacts = processModuleDependencies( zipArchiver ); processJarDependencies( zipArchiver, moduleTypeArtifacts ); } zipArchiver.createArchive(); project.getArtifact().setFile( destFile ); } catch ( ArchiverException e ) { throw new MojoExecutionException( "?", e ); } catch ( NoSuchArchiverException e ) { throw new MojoExecutionException( "?", e ); } } private Map<Artifact, String> processModuleDependencies( Archiver archiver ) throws ArchiverException, NoSuchArchiverException, IOException { Map<Artifact, String> moduleTypeArtifacts = new HashMap<Artifact, String>(); Set<?> artifacts = project.getArtifacts(); for ( Iterator<?> iter = artifacts.iterator(); iter.hasNext(); ) { Artifact artifact = (Artifact) iter.next(); if ( "zip".equals( artifact.getType() ) ) { if ( "module".equals( artifact.getClassifier() ) || "module-min".equals( artifact.getClassifier() ) ) { processZipDependency( artifact, archiver, moduleTypeArtifacts ); } } else if ( "play".equals( artifact.getType() ) ) { processZipDependency( artifact, archiver, null ); // it's not necessary to add "play" type dependencies to "moduleTypeArtifacts" map } } return moduleTypeArtifacts; } private void processZipDependency( Artifact artifact, Archiver archiver, Map<Artifact, String> moduleTypeArtifacts ) throws ArchiverException, NoSuchArchiverException, IOException { // System.out.println("module: " + artifact.getGroupId() + ":" + artifact.getArtifactId()); // System.out.println( "artifact: groupId=" + artifact.getGroupId() + ":artifactId=" // + artifact.getArtifactId() + ":type=" + artifact.getType() + ":classifier=" // + artifact.getClassifier() + ":scope=" + artifact.getScope() ); File zipFile = artifact.getFile(); String moduleName = artifact.getArtifactId(); if ( moduleName.startsWith( "play-" ) ) { moduleName = moduleName.substring( "play-".length() ); } String moduleSubDir = String.format( "%s-%s", moduleName, artifact.getVersion() ); archiver.addArchivedFileSet( zipFile, "modules/" + moduleSubDir + "/" ); if ( moduleTypeArtifacts != null ) { moduleTypeArtifacts.put( artifact, moduleSubDir ); } } private void processJarDependencies( Archiver archiver, Map<Artifact, String> moduleTypeArtifacts ) throws ArchiverException, NoSuchArchiverException, IOException { Set<?> artifacts = project.getArtifacts(); for ( Iterator<?> iter = artifacts.iterator(); iter.hasNext(); ) { Artifact artifact = (Artifact) iter.next(); if ( "jar".equals( artifact.getType() ) ) { // System.out.println("jar: " + artifact.getGroupId() + ":" + artifact.getArtifactId()); File jarFile = artifact.getFile(); String libDir = "lib"; for ( Map.Entry<Artifact, String> moduleTypeArtifactEntry : moduleTypeArtifacts.entrySet() ) { Artifact moduleArtifact = moduleTypeArtifactEntry.getKey(); // System.out.println("checking module: " + moduleArtifact.getGroupId() + ":" + // moduleArtifact.getArtifactId()); if ( artifact.getGroupId().equals( moduleArtifact.getGroupId() ) && artifact.getArtifactId().equals( moduleArtifact.getArtifactId() ) ) { String moduleSubDir = moduleTypeArtifactEntry.getValue(); libDir = String.format( "modules/%s/lib", moduleSubDir ); // System.out.println("checked ok - lib is " + libDir.getCanonicalPath()); break; } } // System.out.println("jar: " + artifact.getGroupId() + ":" + artifact.getArtifactId() + " added to " + // libDir); archiver.addFile( jarFile, libDir + "/" + jarFile.getName() ); } } } } // 
TODO - add name-conflict detection for modules and jars
Misleading comment removed. git-svn-id: f062fe73d040ecc33a0a0fdfc91cadea2c50f7be@6289 9c2ad861-7886-cb7c-dcd7-2b8d41cd5de0
play-maven-plugin/src/main/java/com/google/code/play/PlayZipMojo.java
Misleading comment removed.
<ide><path>lay-maven-plugin/src/main/java/com/google/code/play/PlayZipMojo.java <ide> <ide> /** <ide> * Should project dependencies ("lib" and "modules" directories) be packaged. <del> * No include/exclude filters. Use "provided" scope to exclude from packaging. <add> * No include/exclude filters. <ide> * <ide> * @parameter expression="${play.zipDependencies}" default-value="false" <ide> * @since 1.0.0
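PlayZipMojo above derives a module's target directory inside the ZIP from its artifactId and version (stripping a leading "play-" prefix) and routes each jar either to the top-level "lib" directory or to the owning module's "modules/<name>-<version>/lib" directory. A small standalone sketch of just that path logic, with invented class and method names and no Maven or Plexus dependencies:

// Sketch of the directory layout described in PlayZipMojo above: module archives
// go under "modules/<name>-<version>/", a jar owned by a module goes into that
// module's lib directory, and every other jar goes into the top-level "lib".
public final class ZipLayoutSketch {

    /** e.g. ("play-crud", "1.2") -> "crud-1.2" */
    static String moduleSubDir(String artifactId, String version) {
        String moduleName = artifactId.startsWith("play-")
                ? artifactId.substring("play-".length())
                : artifactId;
        return String.format("%s-%s", moduleName, version);
    }

    /** Destination for a jar: the owning module's lib directory, or "lib" if it has no module. */
    static String jarDestination(String moduleSubDirOrNull) {
        return moduleSubDirOrNull == null
                ? "lib"
                : String.format("modules/%s/lib", moduleSubDirOrNull);
    }

    public static void main(String[] args) {
        String subDir = moduleSubDir("play-crud", "1.2");
        System.out.println("modules/" + subDir + "/"); // modules/crud-1.2/
        System.out.println(jarDestination(subDir));    // modules/crud-1.2/lib
        System.out.println(jarDestination(null));      // lib
    }
}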
Java
mit
39ce30fde13668e131e2a5dd1755b879f8c1d793
0
InnovateUKGitHub/innovation-funding-service,InnovateUKGitHub/innovation-funding-service,InnovateUKGitHub/innovation-funding-service,InnovateUKGitHub/innovation-funding-service,InnovateUKGitHub/innovation-funding-service
package com.worth.ifs.project.transactional; import com.worth.ifs.BaseServiceUnitTest; import com.worth.ifs.address.domain.Address; import com.worth.ifs.address.domain.AddressType; import com.worth.ifs.address.resource.AddressResource; import com.worth.ifs.application.domain.Application; import com.worth.ifs.commons.error.CommonFailureKeys; import com.worth.ifs.commons.service.ServiceResult; import com.worth.ifs.file.domain.FileEntry; import com.worth.ifs.file.resource.FileEntryResource; import com.worth.ifs.file.service.FileAndContents; import com.worth.ifs.organisation.domain.OrganisationAddress; import com.worth.ifs.project.builder.MonitoringOfficerBuilder; import com.worth.ifs.project.domain.MonitoringOfficer; import com.worth.ifs.project.domain.Project; import com.worth.ifs.project.domain.ProjectUser; import com.worth.ifs.project.resource.MonitoringOfficerResource; import com.worth.ifs.project.resource.ProjectResource; import com.worth.ifs.project.resource.ProjectUserResource; import com.worth.ifs.user.domain.Organisation; import com.worth.ifs.user.domain.ProcessRole; import com.worth.ifs.user.domain.Role; import com.worth.ifs.user.domain.User; import com.worth.ifs.user.resource.UserRoleType; import org.apache.commons.lang3.tuple.Pair; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import java.io.File; import java.io.InputStream; import java.time.LocalDate; import java.time.LocalDateTime; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.function.BiFunction; import java.util.function.Consumer; import java.util.function.Supplier; import static com.worth.ifs.LambdaMatcher.createLambdaMatcher; import static com.worth.ifs.address.builder.AddressBuilder.newAddress; import static com.worth.ifs.address.builder.AddressResourceBuilder.newAddressResource; import static com.worth.ifs.address.builder.AddressTypeBuilder.newAddressType; import static com.worth.ifs.address.resource.OrganisationAddressType.*; import static com.worth.ifs.application.builder.ApplicationBuilder.newApplication; import static com.worth.ifs.commons.error.CommonErrors.badRequestError; import static com.worth.ifs.commons.error.CommonErrors.notFoundError; import static com.worth.ifs.commons.error.CommonFailureKeys.*; import static com.worth.ifs.commons.service.ServiceResult.serviceSuccess; import static com.worth.ifs.file.domain.builders.FileEntryBuilder.newFileEntry; import static com.worth.ifs.file.resource.builders.FileEntryResourceBuilder.newFileEntryResource; import static com.worth.ifs.organisation.builder.OrganisationAddressBuilder.newOrganisationAddress; import static com.worth.ifs.project.builder.MonitoringOfficerResourceBuilder.newMonitoringOfficerResource; import static com.worth.ifs.project.builder.ProjectBuilder.newProject; import static com.worth.ifs.project.builder.ProjectResourceBuilder.newProjectResource; import static com.worth.ifs.project.builder.ProjectUserBuilder.newProjectUser; import static com.worth.ifs.project.builder.ProjectUserResourceBuilder.newProjectUserResource; import static com.worth.ifs.user.builder.OrganisationBuilder.newOrganisation; import static com.worth.ifs.user.builder.ProcessRoleBuilder.newProcessRole; import static com.worth.ifs.user.builder.RoleBuilder.newRole; import static com.worth.ifs.user.builder.UserBuilder.newUser; import static com.worth.ifs.user.resource.UserRoleType.*; import static com.worth.ifs.util.CollectionFunctions.simpleFilter; import static java.util.Arrays.asList; import static 
java.util.Collections.emptyList; import static java.util.Collections.singletonList; import static org.junit.Assert.*; import static org.mockito.Mockito.*; public class ProjectServiceImplTest extends BaseServiceUnitTest<ProjectService> { private Long projectId = 123L; private Long applicationId = 456L; private Long userId = 7L; private Long otherUserId = 8L; private Application application; private Organisation organisation; private Role leadApplicantRole; private Role projectManagerRole; private Role partnerRole; private User user; private ProcessRole leadApplicantProcessRole; private ProjectUser leadPartnerProjectUser; private Project project; private MonitoringOfficerResource monitoringOfficerResource; @Before public void setUp() { organisation = newOrganisation().build(); leadApplicantRole = newRole(LEADAPPLICANT).build(); projectManagerRole = newRole(PROJECT_MANAGER).build(); partnerRole = newRole(PARTNER).build(); user = newUser(). withid(userId). build(); leadApplicantProcessRole = newProcessRole(). withOrganisation(organisation). withRole(leadApplicantRole). withUser(user). build(); leadPartnerProjectUser = newProjectUser(). withOrganisation(organisation). withRole(partnerRole). withUser(user). build(); application = newApplication(). withId(applicationId). withProcessRoles(leadApplicantProcessRole). withName("My Application"). withDurationInMonths(5L). withStartDate(LocalDate.of(2017, 3, 2)). build(); project = newProject(). withId(projectId). withApplication(application). withProjectUsers(singletonList(leadPartnerProjectUser)). build(); monitoringOfficerResource = newMonitoringOfficerResource() .withProject(1L) .withFirstName("abc") .withLastName("xyz") .withEmail("[email protected]") .withPhoneNumber("078323455") .build(); when(applicationRepositoryMock.findOne(applicationId)).thenReturn(application); when(projectRepositoryMock.findOne(projectId)).thenReturn(project); } @Test public void testCreateProjectFromApplication() { Role partnerRole = newRole().withType(PARTNER).build(); ProjectResource newProjectResource = newProjectResource().build(); when(applicationRepositoryMock.findOne(applicationId)).thenReturn(application); Project savedProject = newProject().build(); when(roleRepositoryMock.findOneByName(PARTNER.getName())).thenReturn(partnerRole); Project newProjectExpectations = createProjectExpectationsFromOriginalApplication(application); when(projectRepositoryMock.save(newProjectExpectations)).thenReturn(savedProject); when(projectMapperMock.mapToResource(savedProject)).thenReturn(newProjectResource); ServiceResult<ProjectResource> project = service.createProjectFromApplication(applicationId); assertTrue(project.isSuccess()); assertEquals(newProjectResource, project.getSuccessObject()); } @Test public void testInvalidProjectManagerProvided() { ServiceResult<Void> result = service.setProjectManager(projectId, otherUserId); assertFalse(result.isSuccess()); assertTrue(result.getFailure().is(PROJECT_SETUP_PROJECT_MANAGER_MUST_BE_LEAD_PARTNER)); } @Test public void testSetProjectManagerWhenProjectDetailsAlreadySubmitted() { Project existingProject = newProject().withSubmittedDate(LocalDateTime.now()).build(); assertTrue(existingProject.getProjectUsers().isEmpty()); when(projectRepositoryMock.findOne(projectId)).thenReturn(existingProject); ServiceResult<Void> result = service.setProjectManager(projectId, userId); assertTrue(result.isFailure()); assertTrue(result.getFailure().is(PROJECT_SETUP_PROJECT_DETAILS_CANNOT_BE_UPDATED_IF_ALREADY_SUBMITTED)); 
assertTrue(existingProject.getProjectUsers().isEmpty()); } @Test public void testValidProjectManagerProvided() { when(roleRepositoryMock.findOneByName(PROJECT_MANAGER.getName())).thenReturn(projectManagerRole); ServiceResult<Void> result = service.setProjectManager(projectId, userId); assertTrue(result.isSuccess()); ProjectUser expectedProjectManager = newProjectUser(). withId(). withProject(project). withOrganisation(organisation). withRole(projectManagerRole). withUser(user). build(); assertEquals(expectedProjectManager, project.getProjectUsers().get(project.getProjectUsers().size() - 1)); } @Test public void testValidProjectManagerProvidedWithExistingProjectManager() { User differentUser = newUser().build(); Organisation differentOrganisation = newOrganisation().build(); @SuppressWarnings("unused") ProjectUser existingProjenullctManager = newProjectUser(). withId(456L). withProject(project). withRole(projectManagerRole). withOrganisation(differentOrganisation). withUser(differentUser). build(); when(roleRepositoryMock.findOneByName(PROJECT_MANAGER.getName())).thenReturn(projectManagerRole); ServiceResult<Void> result = service.setProjectManager(projectId, userId); assertTrue(result.isSuccess()); ProjectUser expectedProjectManager = newProjectUser(). withId(456L). withProject(project). withOrganisation(organisation). withRole(projectManagerRole). withUser(user). build(); assertEquals(expectedProjectManager, project.getProjectUsers().get(project.getProjectUsers().size() - 1)); } @Test public void testUpdateProjectStartDate() { LocalDate now = LocalDate.now(); LocalDate validDate = LocalDate.of(now.getYear(), now.getMonthValue(), 1).plusMonths(1); Project existingProject = newProject().build(); assertNull(existingProject.getTargetStartDate()); when(projectRepositoryMock.findOne(123L)).thenReturn(existingProject); ServiceResult<Void> updateResult = service.updateProjectStartDate(123L, validDate); assertTrue(updateResult.isSuccess()); verify(projectRepositoryMock).findOne(123L); assertEquals(validDate, existingProject.getTargetStartDate()); } @Test public void testUpdateProjectStartDateButProjectDoesntExist() { LocalDate now = LocalDate.now(); LocalDate validDate = LocalDate.of(now.getYear(), now.getMonthValue(), 1).plusMonths(1); when(projectRepositoryMock.findOne(123L)).thenReturn(null); ServiceResult<Void> updateResult = service.updateProjectStartDate(123L, validDate); assertTrue(updateResult.isFailure()); assertTrue(updateResult.getFailure().is(notFoundError(Project.class, 123L))); } @Test public void testUpdateProjectStartDateButStartDateDoesntBeginOnFirstDayOfMonth() { LocalDate now = LocalDate.now(); LocalDate dateNotOnFirstDayOfMonth = LocalDate.of(now.getYear(), now.getMonthValue(), 2).plusMonths(1); Project existingProject = newProject().build(); assertNull(existingProject.getTargetStartDate()); when(projectRepositoryMock.findOne(123L)).thenReturn(existingProject); ServiceResult<Void> updateResult = service.updateProjectStartDate(123L, dateNotOnFirstDayOfMonth); assertTrue(updateResult.isFailure()); assertTrue(updateResult.getFailure().is(PROJECT_SETUP_DATE_MUST_START_ON_FIRST_DAY_OF_MONTH)); verify(projectRepositoryMock, never()).findOne(123L); assertNull(existingProject.getTargetStartDate()); } @Test public void testUpdateProjectStartDateButStartDateNotInFuture() { LocalDate now = LocalDate.now(); LocalDate pastDate = LocalDate.of(now.getYear(), now.getMonthValue(), 1).minusMonths(1); Project existingProject = newProject().build(); 
assertNull(existingProject.getTargetStartDate()); when(projectRepositoryMock.findOne(123L)).thenReturn(existingProject); ServiceResult<Void> updateResult = service.updateProjectStartDate(123L, pastDate); assertTrue(updateResult.isFailure()); assertTrue(updateResult.getFailure().is(PROJECT_SETUP_DATE_MUST_BE_IN_THE_FUTURE)); verify(projectRepositoryMock, never()).findOne(123L); assertNull(existingProject.getTargetStartDate()); } @Test public void testUpdateProjectStartDateWhenProjectDetailsAlreadySubmitted() { LocalDate now = LocalDate.now(); LocalDate validDate = LocalDate.of(now.getYear(), now.getMonthValue(), 1).plusMonths(1); Project existingProject = newProject().withSubmittedDate(LocalDateTime.now()).build(); assertNull(existingProject.getTargetStartDate()); assertNotNull(existingProject.getSubmittedDate()); when(projectRepositoryMock.findOne(123L)).thenReturn(existingProject); ServiceResult<Void> updateResult = service.updateProjectStartDate(123L, validDate); assertTrue(updateResult.isFailure()); assertTrue(updateResult.getFailure().is(PROJECT_SETUP_PROJECT_DETAILS_CANNOT_BE_UPDATED_IF_ALREADY_SUBMITTED)); verify(projectRepositoryMock).findOne(123L); assertNull(existingProject.getTargetStartDate()); } @Test public void testUpdateFinanceContact() { Project project = newProject().withId(123L).build(); Organisation organisation = newOrganisation().withId(5L).build(); User user = newUser().withid(7L).build(); Role partnerRole = newRole().withType(PARTNER).build(); newProjectUser().withOrganisation(organisation).withUser(user).withProject(project).withRole(partnerRole).build(); Role financeContactRole = newRole().withType(FINANCE_CONTACT).build(); when(projectRepositoryMock.findOne(123L)).thenReturn(project); when(roleRepositoryMock.findOneByName(FINANCE_CONTACT.getName())).thenReturn(financeContactRole); ServiceResult<Void> updateResult = service.updateFinanceContact(123L, 5L, 7L); assertTrue(updateResult.isSuccess()); List<ProjectUser> foundFinanceContacts = simpleFilter(project.getProjectUsers(), projectUser -> projectUser.getOrganisation().equals(organisation) && projectUser.getUser().equals(user) && projectUser.getProject().equals(project) && projectUser.getRole().equals(financeContactRole)); assertEquals(1, foundFinanceContacts.size()); } @Test public void testUpdateFinanceContactWithExistingFinanceContactChosenForSameOrganisation() { Role partnerRole = newRole().withType(PARTNER).build(); Role financeContactRole = newRole().withType(FINANCE_CONTACT).build(); Project project = newProject().withId(123L).build(); Organisation organisation = newOrganisation().withId(5L).build(); User newFinanceContactUser = newUser().withid(7L).build(); newProjectUser().withOrganisation(organisation).withUser(newFinanceContactUser).withProject(project).withRole(partnerRole).build(); User existingFinanceContactUser = newUser().withid(9999L).build(); newProjectUser().withOrganisation(organisation).withUser(existingFinanceContactUser).withProject(project).withRole(partnerRole).build(); newProjectUser().withOrganisation(organisation).withUser(existingFinanceContactUser).withProject(project).withRole(financeContactRole).build(); when(projectRepositoryMock.findOne(123L)).thenReturn(project); when(roleRepositoryMock.findOneByName(FINANCE_CONTACT.getName())).thenReturn(financeContactRole); List<ProjectUser> existingFinanceContactForOrganisation = simpleFilter(project.getProjectUsers(), projectUser -> projectUser.getOrganisation().equals(organisation) && projectUser.getProject().equals(project) && 
projectUser.getRole().equals(financeContactRole)); assertEquals(1, existingFinanceContactForOrganisation.size()); ServiceResult<Void> updateResult = service.updateFinanceContact(123L, 5L, 7L); assertTrue(updateResult.isSuccess()); List<ProjectUser> foundFinanceContacts = simpleFilter(project.getProjectUsers(), projectUser -> projectUser.getOrganisation().equals(organisation) && projectUser.getUser().equals(newFinanceContactUser) && projectUser.getProject().equals(project) && projectUser.getRole().equals(financeContactRole)); assertEquals(1, foundFinanceContacts.size()); } @Test public void testUpdateFinanceContactButUserIsNotExistingPartner() { Role projectManagerRole = newRole().withType(PROJECT_MANAGER).build(); Project project = newProject().withId(123L).build(); Organisation organisation = newOrganisation().withId(5L).build(); User user = newUser().withid(7L).build(); newProjectUser().withOrganisation(organisation).withUser(user).withProject(project).withRole(projectManagerRole).build(); when(projectRepositoryMock.findOne(123L)).thenReturn(project); ServiceResult<Void> updateResult = service.updateFinanceContact(123L, 5L, 7L); assertTrue(updateResult.isFailure()); assertTrue(updateResult.getFailure().is(PROJECT_SETUP_FINANCE_CONTACT_MUST_BE_A_PARTNER_ON_THE_PROJECT_FOR_THE_ORGANISATION)); verify(processRoleRepositoryMock, never()).save(isA(ProcessRole.class)); } @Test public void testUpdateFinanceContactWhenNotPresentOnTheProject() { long userIdForUserNotOnProject = 6L; Role partnerRole = newRole().withType(PARTNER).build(); Project existingProject = newProject().withId(123L).build(); Project anotherProject = newProject().withId(9999L).build(); when(projectRepositoryMock.findOne(123L)).thenReturn(existingProject); Organisation organisation = newOrganisation().withId(5L).build(); User user = newUser().withid(7L).build(); newProjectUser().withOrganisation(organisation).withUser(user).withProject(anotherProject).withRole(partnerRole).build(); ServiceResult<Void> updateResult = service.updateFinanceContact(123L, 5L, userIdForUserNotOnProject); assertTrue(updateResult.isFailure()); assertTrue(updateResult.getFailure().is(PROJECT_SETUP_FINANCE_CONTACT_MUST_BE_A_USER_ON_THE_PROJECT_FOR_THE_ORGANISATION)); } @Test public void testUpdateFinanceContactWhenProjectDetailsAlreadySubmitted() { Project project = newProject().withId(123L).withSubmittedDate(LocalDateTime.now()).build(); assertTrue(project.getProjectUsers().isEmpty()); when(projectRepositoryMock.findOne(123L)).thenReturn(project); ServiceResult<Void> updateResult = service.updateFinanceContact(123L, 5L, 7L); assertTrue(updateResult.isFailure()); assertTrue(updateResult.getFailure().is(PROJECT_SETUP_PROJECT_DETAILS_CANNOT_BE_UPDATED_IF_ALREADY_SUBMITTED)); verify(projectRepositoryMock).findOne(123L); assertTrue(project.getProjectUsers().isEmpty()); } @Test public void testFindByUserIdReturnsOnlyDistinctProjects() { Project project = newProject().withId(123L).build(); Organisation organisation = newOrganisation().withId(5L).build(); User user = newUser().withid(7L).build(); Role partnerRole = newRole().withType(PARTNER).build(); Role financeContactRole = newRole().withType(FINANCE_CONTACT).build(); ProjectUser projectUserWithPartnerRole = newProjectUser().withOrganisation(organisation).withUser(user).withProject(project).withRole(partnerRole).build(); ProjectUser projectUserWithFinanceRole = newProjectUser().withOrganisation(organisation).withUser(user).withProject(project).withRole(financeContactRole).build(); List<ProjectUser> 
projectUserRecords = asList(projectUserWithPartnerRole, projectUserWithFinanceRole); ProjectResource projectResource = newProjectResource().withId(project.getId()).build(); when(projectUserRepositoryMock.findByUserId(user.getId())).thenReturn(projectUserRecords); when(projectMapperMock.mapToResource(project)).thenReturn(projectResource); ServiceResult<List<ProjectResource>> result = service.findByUserId(user.getId()); assertTrue(result.isSuccess()); assertEquals(result.getSuccessObject().size(), 1L); } @Test public void testUpdateProjectAddressToBeRegisteredAddress() { Project project = newProject().withId(1L).build(); Organisation leadOrganisation = newOrganisation().withId(1L).build(); AddressResource existingRegisteredAddressResource = newAddressResource().build(); Address registeredAddress = newAddress().build(); when(projectRepositoryMock.findOne(project.getId())).thenReturn(project); when(organisationRepositoryMock.findOne(organisation.getId())).thenReturn(organisation); when(addressRepositoryMock.exists(existingRegisteredAddressResource.getId())).thenReturn(true); when(addressRepositoryMock.findOne(existingRegisteredAddressResource.getId())).thenReturn(registeredAddress); ServiceResult<Void> result = service.updateProjectAddress(leadOrganisation.getId(), project.getId(), REGISTERED, existingRegisteredAddressResource); assertTrue(result.isSuccess()); } @Test public void testUpdateProjectAddressToBeOperatingAddress() { Project project = newProject().withId(1L).build(); Organisation leadOrganisation = newOrganisation().withId(1L).build(); AddressResource existingOperatingAddressResource = newAddressResource().build(); Address operatingAddress = newAddress().build(); when(projectRepositoryMock.findOne(project.getId())).thenReturn(project); when(organisationRepositoryMock.findOne(organisation.getId())).thenReturn(organisation); when(addressRepositoryMock.exists(existingOperatingAddressResource.getId())).thenReturn(true); when(addressRepositoryMock.findOne(existingOperatingAddressResource.getId())).thenReturn(operatingAddress); ServiceResult<Void> result = service.updateProjectAddress(leadOrganisation.getId(), project.getId(), OPERATING, existingOperatingAddressResource); assertTrue(result.isSuccess()); } @Test public void testUpdateProjectAddressToNewProjectAddress() { Project project = newProject().withId(1L).build(); Organisation leadOrganisation = newOrganisation().withId(1L).build(); AddressResource newAddressResource = newAddressResource().build(); Address newAddress = newAddress().build(); AddressType projectAddressType = newAddressType().withId((long) PROJECT.getOrdinal()).withName(PROJECT.name()).build(); OrganisationAddress organisationAddress = newOrganisationAddress().withOrganisation(leadOrganisation).withAddress(newAddress).withAddressType(projectAddressType).build(); when(projectRepositoryMock.findOne(project.getId())).thenReturn(project); when(organisationRepositoryMock.findOne(organisation.getId())).thenReturn(organisation); when(addressRepositoryMock.exists(newAddressResource.getId())).thenReturn(false); when(addressMapperMock.mapToDomain(newAddressResource)).thenReturn(newAddress); when(addressTypeRepositoryMock.findOne((long) PROJECT.getOrdinal())).thenReturn(projectAddressType); when(organisationAddressRepositoryMock.findByOrganisationIdAndAddressType(leadOrganisation.getId(), projectAddressType)).thenReturn(emptyList()); when(organisationAddressRepositoryMock.save(organisationAddress)).thenReturn(organisationAddress); ServiceResult<Void> result = 
service.updateProjectAddress(leadOrganisation.getId(), project.getId(), PROJECT, newAddressResource); assertTrue(result.isSuccess()); } @Test public void testSaveProjectSubmitDateTimeIsSuccessfulWhenAllProjectDetailsHaveBeenProvided() { Organisation organisation1 = newOrganisation().build(); Organisation organisation2 = newOrganisation().build(); Organisation organisation3 = newOrganisation().build(); Role projectManagerRole = newRole().withType(PROJECT_MANAGER).build(); ProjectUser projectManagerProjectUser = newProjectUser().withRole(projectManagerRole).build(); Address address = newAddress().build(); Project project = newProject().withId(1L).withAddress(address).withProjectUsers(singletonList(projectManagerProjectUser)).withTargetStartDate(LocalDate.now()).build(); Role financeContactRole = newRole().withType(FINANCE_CONTACT).build(); Role partnerRole = newRole().withType(PARTNER).build(); List<ProjectUser> projectUserObjs; ProjectUser projectUser1WithPartnerRole = newProjectUser().withProject(project).withOrganisation(organisation1).withRole(partnerRole).build(); ProjectUser projectUser1WithFinanceRole = newProjectUser().withProject(project).withOrganisation(organisation1).withRole(financeContactRole).build(); ProjectUser projectUser2WithPartnerRole = newProjectUser().withProject(project).withOrganisation(organisation2).withRole(partnerRole).build(); ProjectUser projectUser2WithFinanceRole = newProjectUser().withProject(project).withOrganisation(organisation2).withRole(financeContactRole).build(); ProjectUser projectUser3WithPartnerRole = newProjectUser().withProject(project).withOrganisation(organisation3).withRole(partnerRole).build(); ProjectUser projectUser3WithFinanceRole = newProjectUser().withProject(project).withOrganisation(organisation3).withRole(financeContactRole).build(); ProjectUserResource projectUser1WithPartnerRoleResource = newProjectUserResource().withProject(project.getId()).withOrganisation(organisation1.getId()).withRole(partnerRole.getId()).withRoleName(PARTNER.getName()).build(); ProjectUserResource projectUser1WithFinanceRoleResource = newProjectUserResource().withProject(project.getId()).withOrganisation(organisation1.getId()).withRole(financeContactRole.getId()).withRoleName(FINANCE_CONTACT.getName()).build(); ProjectUserResource projectUser2WithPartnerRoleResource = newProjectUserResource().withProject(project.getId()).withOrganisation(organisation2.getId()).withRole(partnerRole.getId()).withRoleName(PARTNER.getName()).build(); ProjectUserResource projectUser2WithFinanceRoleResource = newProjectUserResource().withProject(project.getId()).withOrganisation(organisation2.getId()).withRole(financeContactRole.getId()).withRoleName(FINANCE_CONTACT.getName()).build(); ProjectUserResource projectUser3WithPartnerRoleResource = newProjectUserResource().withProject(project.getId()).withOrganisation(organisation3.getId()).withRole(partnerRole.getId()).withRoleName(PARTNER.getName()).build(); ProjectUserResource projectUser3WithFinanceRoleResource = newProjectUserResource().withProject(project.getId()).withOrganisation(organisation3.getId()).withRole(partnerRole.getId()).withRoleName(FINANCE_CONTACT.getName()).build(); ProjectUserResource projectManagerProjectUserResource = newProjectUserResource().withProject(project.getId()).withOrganisation(organisation3.getId()).withRole(projectManagerRole.getId()).withRoleName(PROJECT_MANAGER.getName()).build(); projectUserObjs = asList(projectManagerProjectUser, projectUser1WithPartnerRole, projectUser1WithFinanceRole, 
projectUser2WithPartnerRole, projectUser2WithFinanceRole, projectUser3WithPartnerRole, projectUser3WithFinanceRole); when(projectRepositoryMock.findOne(1L)).thenReturn(project); when(projectUserRepositoryMock.findByProjectId(1L)).thenReturn(projectUserObjs); when(organisationRepositoryMock.findOne(organisation1.getId())).thenReturn(organisation1); when(organisationRepositoryMock.findOne(organisation2.getId())).thenReturn(organisation2); when(organisationRepositoryMock.findOne(organisation3.getId())).thenReturn(organisation3); when(projectUserMapperMock.mapToResource(projectUser1WithFinanceRole)).thenReturn(projectUser1WithFinanceRoleResource); when(projectUserMapperMock.mapToResource(projectUser1WithPartnerRole)).thenReturn(projectUser1WithPartnerRoleResource); when(projectUserMapperMock.mapToResource(projectUser2WithPartnerRole)).thenReturn(projectUser2WithPartnerRoleResource); when(projectUserMapperMock.mapToResource(projectUser2WithFinanceRole)).thenReturn(projectUser2WithFinanceRoleResource); when(projectUserMapperMock.mapToResource(projectUser3WithPartnerRole)).thenReturn(projectUser3WithPartnerRoleResource); when(projectUserMapperMock.mapToResource(projectUser3WithFinanceRole)).thenReturn(projectUser3WithFinanceRoleResource); when(projectUserMapperMock.mapToResource(projectManagerProjectUser)).thenReturn(projectManagerProjectUserResource); ServiceResult result = service.saveProjectSubmitDateTime(1L, LocalDateTime.now()); assertTrue(result.isSuccess()); } @Test public void testSaveProjectSubmitDateTimeIsUnSuccessfulWhenAFinanceContactIsMissing() { Organisation organisation1 = newOrganisation().build(); Organisation organisation2 = newOrganisation().build(); Organisation organisation3 = newOrganisation().build(); Role projectManagerRole = newRole().withType(PROJECT_MANAGER).build(); ProjectUser projectManagerProjectUser = newProjectUser().withRole(projectManagerRole).build(); Address address = newAddress().build(); Project project = newProject().withId(1L).withAddress(address).withProjectUsers(singletonList(projectManagerProjectUser)).withTargetStartDate(LocalDate.now()).build(); Role financeContactRole = newRole().withType(FINANCE_CONTACT).build(); Role partnerRole = newRole().withType(PARTNER).build(); List<ProjectUser> projectUserObjs; ProjectUser projectUser1WithPartnerRole = newProjectUser().withProject(project).withOrganisation(organisation1).withRole(partnerRole).build(); ProjectUser projectUser1WithFinanceRole = newProjectUser().withProject(project).withOrganisation(organisation1).withRole(financeContactRole).build(); ProjectUser projectUser2WithPartnerRole = newProjectUser().withProject(project).withOrganisation(organisation2).withRole(partnerRole).build(); ProjectUser projectUser2WithFinanceRole = newProjectUser().withProject(project).withOrganisation(organisation2).withRole(financeContactRole).build(); ProjectUser projectUserWithPartnerRole = newProjectUser().withProject(project).withOrganisation(organisation3).withRole(partnerRole).build(); ProjectUserResource projectUser1WithPartnerRoleResource = newProjectUserResource().withProject(project.getId()).withOrganisation(organisation1.getId()).withRole(partnerRole.getId()).withRoleName(PARTNER.getName()).build(); ProjectUserResource projectUser1WithFinanceRoleResource = newProjectUserResource().withProject(project.getId()).withOrganisation(organisation1.getId()).withRole(financeContactRole.getId()).withRoleName(FINANCE_CONTACT.getName()).build(); ProjectUserResource projectUser2WithPartnerRoleResource = 
newProjectUserResource().withProject(project.getId()).withOrganisation(organisation2.getId()).withRole(partnerRole.getId()).withRoleName(PARTNER.getName()).build(); ProjectUserResource projectUser2WithFinanceRoleResource = newProjectUserResource().withProject(project.getId()).withOrganisation(organisation2.getId()).withRole(financeContactRole.getId()).withRoleName(FINANCE_CONTACT.getName()).build(); ProjectUserResource projectUserWithPartnerRoleResource = newProjectUserResource().withProject(project.getId()).withOrganisation(organisation3.getId()).withRole(partnerRole.getId()).withRoleName(PARTNER.getName()).build(); projectUserObjs = asList(projectUser1WithPartnerRole, projectUser1WithFinanceRole, projectUser2WithPartnerRole, projectUser2WithFinanceRole, projectUserWithPartnerRole); when(projectRepositoryMock.findOne(1L)).thenReturn(project); when(projectUserRepositoryMock.findByProjectId(1L)).thenReturn(projectUserObjs); when(organisationRepositoryMock.findOne(organisation1.getId())).thenReturn(organisation1); when(organisationRepositoryMock.findOne(organisation2.getId())).thenReturn(organisation2); when(organisationRepositoryMock.findOne(organisation3.getId())).thenReturn(organisation3); when(projectUserMapperMock.mapToResource(projectUser1WithFinanceRole)).thenReturn(projectUser1WithFinanceRoleResource); when(projectUserMapperMock.mapToResource(projectUser1WithPartnerRole)).thenReturn(projectUser1WithPartnerRoleResource); when(projectUserMapperMock.mapToResource(projectUser2WithPartnerRole)).thenReturn(projectUser2WithPartnerRoleResource); when(projectUserMapperMock.mapToResource(projectUser2WithFinanceRole)).thenReturn(projectUser2WithFinanceRoleResource); when(projectUserMapperMock.mapToResource(projectUserWithPartnerRole)).thenReturn(projectUserWithPartnerRoleResource); ServiceResult<Void> result = service.saveProjectSubmitDateTime(1L, LocalDateTime.now()); assertTrue(result.isFailure()); assertTrue(result.getFailure().is(PROJECT_SETUP_PROJECT_DETAILS_CANNOT_BE_SUBMITTED_IF_INCOMPLETE)); } @Test public void testCannotSaveProjectSubmitDateTimeWhenNotAllProjectDetailsHaveBeenProvided() { Organisation organisation1 = newOrganisation().build(); Organisation organisation2 = newOrganisation().build(); Organisation organisation3 = newOrganisation().build(); Role projectManagerRole = newRole().withType(PROJECT_MANAGER).build(); ProjectUser projectManagerProjectUser = newProjectUser().withRole(projectManagerRole).build(); Address address = newAddress().build(); Project project = newProject().withId(1L).withAddress(address).withProjectUsers(singletonList(projectManagerProjectUser)).build(); Role financeContactRole = newRole().withType(FINANCE_CONTACT).build(); Role partnerRole = newRole().withType(PARTNER).build(); List<ProjectUser> projectUserObjs; ProjectUser projectUser1WithPartnerRole = newProjectUser().withProject(project).withOrganisation(organisation1).withRole(partnerRole).build(); ProjectUser projectUser1WithFinanceRole = newProjectUser().withProject(project).withOrganisation(organisation1).withRole(financeContactRole).build(); ProjectUser projectUser2WithPartnerRole = newProjectUser().withProject(project).withOrganisation(organisation2).withRole(partnerRole).build(); ProjectUser projectUser2WithFinanceRole = newProjectUser().withProject(project).withOrganisation(organisation2).withRole(financeContactRole).build(); ProjectUser projectUserWithPartnerRole = newProjectUser().withProject(project).withOrganisation(organisation3).withRole(partnerRole).build(); ProjectUserResource 
projectUser1WithPartnerRoleResource = newProjectUserResource().withProject(project.getId()).withOrganisation(organisation1.getId()).withRole(partnerRole.getId()).withRoleName(PARTNER.getName()).build(); ProjectUserResource projectUser1WithFinanceRoleResource = newProjectUserResource().withProject(project.getId()).withOrganisation(organisation1.getId()).withRole(financeContactRole.getId()).withRoleName(FINANCE_CONTACT.getName()).build(); ProjectUserResource projectUser2WithPartnerRoleResource = newProjectUserResource().withProject(project.getId()).withOrganisation(organisation2.getId()).withRole(partnerRole.getId()).withRoleName(PARTNER.getName()).build(); ProjectUserResource projectUser2WithFinanceRoleResource = newProjectUserResource().withProject(project.getId()).withOrganisation(organisation2.getId()).withRole(financeContactRole.getId()).withRoleName(FINANCE_CONTACT.getName()).build(); ProjectUserResource projectUserWithPartnerRoleResource = newProjectUserResource().withProject(project.getId()).withOrganisation(organisation3.getId()).withRole(partnerRole.getId()).withRoleName(PARTNER.getName()).build(); projectUserObjs = asList(projectUser1WithPartnerRole, projectUser1WithFinanceRole, projectUser2WithPartnerRole, projectUser2WithFinanceRole, projectUserWithPartnerRole); when(projectRepositoryMock.findOne(1L)).thenReturn(project); when(projectUserRepositoryMock.findByProjectId(1L)).thenReturn(projectUserObjs); when(organisationRepositoryMock.findOne(organisation1.getId())).thenReturn(organisation1); when(organisationRepositoryMock.findOne(organisation2.getId())).thenReturn(organisation2); when(organisationRepositoryMock.findOne(organisation3.getId())).thenReturn(organisation3); when(projectUserMapperMock.mapToResource(projectUser1WithFinanceRole)).thenReturn(projectUser1WithFinanceRoleResource); when(projectUserMapperMock.mapToResource(projectUser1WithPartnerRole)).thenReturn(projectUser1WithPartnerRoleResource); when(projectUserMapperMock.mapToResource(projectUser2WithPartnerRole)).thenReturn(projectUser2WithPartnerRoleResource); when(projectUserMapperMock.mapToResource(projectUser2WithFinanceRole)).thenReturn(projectUser2WithFinanceRoleResource); when(projectUserMapperMock.mapToResource(projectUserWithPartnerRole)).thenReturn(projectUserWithPartnerRoleResource); ServiceResult<Void> result = service.saveProjectSubmitDateTime(1L, LocalDateTime.now()); assertTrue(result.isFailure()); assertTrue(result.getFailure().is(PROJECT_SETUP_PROJECT_DETAILS_CANNOT_BE_SUBMITTED_IF_INCOMPLETE)); } @Test public void testSaveMOWithDiffProjectIdInURLAndMOResource() { Long projectid = 1L; MonitoringOfficerResource monitoringOfficerResource = newMonitoringOfficerResource() .withProject(3L) .withFirstName("abc") .withLastName("xyz") .withEmail("[email protected]") .withPhoneNumber("078323455") .build(); ServiceResult<Void> result = service.saveMonitoringOfficer(projectid, monitoringOfficerResource); assertTrue(result.getFailure().is(PROJECT_SETUP_PROJECT_ID_IN_URL_MUST_MATCH_PROJECT_ID_IN_MONITORING_OFFICER_RESOURCE)); } @Test public void testSaveMOWhenProjectDetailsNotYetSubmitted() { Long projectid = 1L; Project projectInDB = newProject().withId(1L).build(); when(projectRepositoryMock.findOne(projectid)).thenReturn(projectInDB); ServiceResult<Void> result = service.saveMonitoringOfficer(projectid, monitoringOfficerResource); assertTrue(result.getFailure().is(PROJECT_SETUP_MONITORING_OFFICER_CANNOT_BE_ASSIGNED_UNTIL_PROJECT_DETAILS_SUBMITTED)); } @Test public void testSaveMOWhenMOExistsForAProject() { Long 
projectid = 1L; // Set this to different values, so that we can assert that it gets updated MonitoringOfficer monitoringOfficerInDB = MonitoringOfficerBuilder.newMonitoringOfficer() .withFirstName("def") .withLastName("klm") .withEmail("[email protected]") .withPhoneNumber("079237439") .build(); Project projectInDB = newProject().withId(1L).withSubmittedDate(LocalDateTime.now()).build(); when(projectRepositoryMock.findOne(projectid)).thenReturn(projectInDB); when(monitoringOfficerRepository.findOneByProjectId(monitoringOfficerResource.getProject())).thenReturn(monitoringOfficerInDB); ServiceResult<Void> result = service.saveMonitoringOfficer(projectid, monitoringOfficerResource); // Assert that the MO in DB is updated with the correct values from MO Resource Assert.assertEquals("First name of MO in DB should be updated with the value from MO Resource", monitoringOfficerInDB.getFirstName(), monitoringOfficerResource.getFirstName()); Assert.assertEquals("Last name of MO in DB should be updated with the value from MO Resource", monitoringOfficerInDB.getLastName(), monitoringOfficerResource.getLastName()); Assert.assertEquals("Email of MO in DB should be updated with the value from MO Resource", monitoringOfficerInDB.getEmail(), monitoringOfficerResource.getEmail()); Assert.assertEquals("Phone number of MO in DB should be updated with the value from MO Resource", monitoringOfficerInDB.getPhoneNumber(), monitoringOfficerResource.getPhoneNumber()); assertTrue(result.isSuccess()); } @Test public void testSaveMOWhenMODoesNotExistForAProject() { Long projectid = 1L; Project projectInDB = newProject().withId(1L).withSubmittedDate(LocalDateTime.now()).build(); when(projectRepositoryMock.findOne(projectid)).thenReturn(projectInDB); when(monitoringOfficerRepository.findOneByProjectId(monitoringOfficerResource.getProject())).thenReturn(null); ServiceResult<Void> result = service.saveMonitoringOfficer(projectid, monitoringOfficerResource); assertTrue(result.isSuccess()); } @Test public void testGetMonitoringOfficerWhenMODoesNotExistInDB() { Long projectid = 1L; ServiceResult<MonitoringOfficerResource> result = service.getMonitoringOfficer(projectid); String errorKey = result.getFailure().getErrors().get(0).getErrorKey(); Assert.assertEquals(CommonFailureKeys.GENERAL_NOT_FOUND.name(), errorKey); } @Test public void testGetMonitoringOfficerWhenMOExistsInDB() { Long projectid = 1L; MonitoringOfficer monitoringOfficerInDB = MonitoringOfficerBuilder.newMonitoringOfficer() .withFirstName("def") .withLastName("klm") .withEmail("[email protected]") .withPhoneNumber("079237439") .build(); when(monitoringOfficerRepository.findOneByProjectId(projectid)).thenReturn(monitoringOfficerInDB); ServiceResult<MonitoringOfficerResource> result = service.getMonitoringOfficer(projectid); assertTrue(result.isSuccess()); } @Test public void testCreateCollaborationAgreementFileEntry() { assertCreateFile( project::getCollaborationAgreement, (fileToCreate, inputStreamSupplier) -> service.createCollaborationAgreementFileEntry(123L, fileToCreate, inputStreamSupplier)); } @Test public void testUpdateCollaborationAgreementFileEntry() { assertUpdateFile( project::getCollaborationAgreement, (fileToUpdate, inputStreamSupplier) -> service.updateCollaborationAgreementFileEntry(123L, fileToUpdate, inputStreamSupplier)); } @Test public void testGetCollaborationAgreementFileEntryDetails() { assertGetFileDetails( project::setCollaborationAgreement, () -> service.getCollaborationAgreementFileEntryDetails(123L)); } @Test public void 
testGetCollaborationAgreementFileContents() { assertGetFileContents( project::setCollaborationAgreement, () -> service.getCollaborationAgreementFileContents(123L)); } @Test public void testDeleteCollaborationAgreementFile() { assertDeleteFile( project::getCollaborationAgreement, project::setCollaborationAgreement, () -> service.deleteCollaborationAgreementFile(123L)); } @Test public void testCreateExploitationPlanFileEntry() { assertCreateFile( project::getExploitationPlan, (fileToCreate, inputStreamSupplier) -> service.createExploitationPlanFileEntry(123L, fileToCreate, inputStreamSupplier)); } @Test public void testUpdateExploitationPlanFileEntry() { assertUpdateFile( project::getExploitationPlan, (fileToUpdate, inputStreamSupplier) -> service.updateExploitationPlanFileEntry(123L, fileToUpdate, inputStreamSupplier)); } @Test public void testGetExploitationPlanFileEntryDetails() { assertGetFileDetails( project::setExploitationPlan, () -> service.getExploitationPlanFileEntryDetails(123L)); } @Test public void testGetExploitationPlanFileContents() { assertGetFileContents( project::setExploitationPlan, () -> service.getExploitationPlanFileContents(123L)); } @Test public void testDeleteExploitationPlanFile() { assertDeleteFile( project::getExploitationPlan, project::setExploitationPlan, () -> service.deleteExploitationPlanFile(123L)); } @Test public void testRetrieveUploadedFilesExist() { assertUploadedFilesExist( project::setCollaborationAgreement, project::setExploitationPlan, () -> service.retrieveUploadedDocuments(123L)); } @Test public void testFilesCanBeSubmitted() { assertFilesCanBeSubmittedByProjectManagerAndFilesExist( project::setCollaborationAgreement, project::setExploitationPlan, () -> service.isOtherDocumentsSubmitAllowed(123L)); } @Test public void testFilesCannotBeSubmittedIfUserNotProjectManager() { assertFilesCannotBeSubmittedIfNotByProjectManager( project::setCollaborationAgreement, project::setExploitationPlan, () -> service.isOtherDocumentsSubmitAllowed(123L)); } @Test public void testAddPartnerOrganisationNotOnProject(){ Organisation o = newOrganisation().build(); Organisation organisationNotOnProject = newOrganisation().build(); User u = newUser().build(); List<ProjectUser> pu = newProjectUser().withRole(PARTNER).withUser(u).withOrganisation(o).build(1); Project p = newProject().withProjectUsers(pu).build(); when(projectRepositoryMock.findOne(p.getId())).thenReturn(p); when(organisationRepositoryMock.findOne(o.getId())).thenReturn(o); when(organisationRepositoryMock.findOne(organisationNotOnProject.getId())).thenReturn(organisationNotOnProject); when(userRepositoryMock.findOne(u.getId())).thenReturn(u); // Method under test ServiceResult<Void> shouldFail = service.addPartner(p.getId(), u.getId(), organisationNotOnProject.getId()); // Expectations assertTrue(shouldFail.isFailure()); assertTrue(shouldFail.getFailure().is(badRequestError("project does not contain organisation"))); } @Test public void testAddPartnerPartnerAlreadyExists(){ Organisation o = newOrganisation().build(); User u = newUser().build(); List<ProjectUser> pu = newProjectUser().withRole(PARTNER).withUser(u).withOrganisation(o).build(1); Project p = newProject().withProjectUsers(pu).build(); when(projectRepositoryMock.findOne(p.getId())).thenReturn(p); when(organisationRepositoryMock.findOne(o.getId())).thenReturn(o); when(userRepositoryMock.findOne(u.getId())).thenReturn(u); // Method under test ServiceResult<Void> shouldFail = service.addPartner(p.getId(), u.getId(), o.getId()); // Expectations 
verifyZeroInteractions(projectUserRepositoryMock); assertTrue(shouldFail.isSuccess()); } @Test public void testAddPartner(){ Organisation o = newOrganisation().build(); User u = newUser().build(); List<ProjectUser> pu = newProjectUser().withRole(PARTNER).withUser(u).withOrganisation(o).build(1); Project p = newProject().withProjectUsers(pu).build(); User newUser = newUser().build(); when(projectRepositoryMock.findOne(p.getId())).thenReturn(p); when(organisationRepositoryMock.findOne(o.getId())).thenReturn(o); when(userRepositoryMock.findOne(u.getId())).thenReturn(u); when(userRepositoryMock.findOne(newUser.getId())).thenReturn(u); // Method under test ServiceResult<Void> shouldFail = service.addPartner(p.getId(), newUser.getId(), o.getId()); // Expectations verify(projectUserRepositoryMock).save(isA(ProjectUser.class)); assertTrue(shouldFail.isSuccess()); } private void assertFilesCannotBeSubmittedIfNotByProjectManager(Consumer<FileEntry> fileSetter1, Consumer<FileEntry> fileSetter2, Supplier<ServiceResult<Boolean>> getConditionFn) { List<ProjectUser> projectUsers = new ArrayList<>(); Arrays.stream(UserRoleType.values()) .filter(roleType -> !roleType.getName().equals(UserRoleType.PROJECT_MANAGER.getName())) .forEach(roleType -> { ProjectUser projectUser = newProjectUser() .withId(3L) .withRole(roleType) .build(); projectUsers.add(projectUser); }); when(projectUserRepositoryMock.findByProjectId(123L)).thenReturn(projectUsers); Supplier<InputStream> inputStreamSupplier1 = () -> null; Supplier<InputStream> inputStreamSupplier2 = () -> null; getFileEntryResources(fileSetter1, fileSetter2, inputStreamSupplier1, inputStreamSupplier2); ServiceResult<Boolean> result = getConditionFn.get(); assertFalse(result.isSuccess()); assertTrue(result.isFailure()); } private void assertFilesCanBeSubmittedByProjectManagerAndFilesExist(Consumer<FileEntry> fileSetter1, Consumer<FileEntry> fileSetter2, Supplier<ServiceResult<Boolean>> getConditionFn) { ProjectUser projectUserToSet = newProjectUser() .withId(1L) .withRole(projectManagerRole) .build(); List<ProjectUser> projectUsers = new ArrayList<>(); projectUsers.add(projectUserToSet); when(projectUserRepositoryMock.findByProjectId(123L)).thenReturn(projectUsers); Supplier<InputStream> inputStreamSupplier1 = () -> null; Supplier<InputStream> inputStreamSupplier2 = () -> null; getFileEntryResources(fileSetter1, fileSetter2, inputStreamSupplier1, inputStreamSupplier2); ServiceResult<Boolean> result = getConditionFn.get(); assertTrue(result.isSuccess()); assertTrue(result.getSuccessObject()); } private void assertUploadedFilesExist(Consumer<FileEntry> fileSetter1, Consumer<FileEntry> fileSetter2, Supplier<List<ServiceResult<FileAndContents>>> getFileContentsFnForFiles) { Supplier<InputStream> inputStreamSupplier1 = () -> null; Supplier<InputStream> inputStreamSupplier2 = () -> null; List<FileEntryResource> fileEntryResourcesToGet = getFileEntryResources(fileSetter1, fileSetter2, inputStreamSupplier1, inputStreamSupplier2); List<ServiceResult<FileAndContents>> results = getFileContentsFnForFiles.get(); assertTrue(results.get(0).isSuccess()); assertTrue(results.get(1).isSuccess()); assertEquals(fileEntryResourcesToGet.get(0), results.get(0).getSuccessObject().getFileEntry()); assertEquals(fileEntryResourcesToGet.get(1), results.get(1).getSuccessObject().getFileEntry()); assertEquals(inputStreamSupplier1, results.get(0).getSuccessObject().getContentsSupplier()); assertEquals(inputStreamSupplier2, results.get(1).getSuccessObject().getContentsSupplier()); } private 
List<FileEntryResource> getFileEntryResources(Consumer<FileEntry> fileSetter1, Consumer<FileEntry> fileSetter2, Supplier<InputStream> inputStreamSupplier1, Supplier<InputStream> inputStreamSupplier2) { FileEntry fileEntry1ToGet = newFileEntry().build(); FileEntry fileEntry2ToGet = newFileEntry().build(); List<FileEntryResource> fileEntryResourcesToGet = newFileEntryResource().withFilesizeBytes(100).build(2); fileSetter1.accept(fileEntry1ToGet); fileSetter2.accept(fileEntry2ToGet); when(fileServiceMock.getFileByFileEntryId(fileEntry1ToGet.getId())).thenReturn(serviceSuccess(inputStreamSupplier1)); when(fileServiceMock.getFileByFileEntryId(fileEntry2ToGet.getId())).thenReturn(serviceSuccess(inputStreamSupplier2)); when(fileEntryMapperMock.mapToResource(fileEntry1ToGet)).thenReturn(fileEntryResourcesToGet.get(0)); when(fileEntryMapperMock.mapToResource(fileEntry2ToGet)).thenReturn(fileEntryResourcesToGet.get(1)); return fileEntryResourcesToGet; } private void assertGetFileContents(Consumer<FileEntry> fileSetter, Supplier<ServiceResult<FileAndContents>> getFileContentsFn) { FileEntry fileToGet = newFileEntry().build(); Supplier<InputStream> inputStreamSupplier = () -> null; FileEntryResource fileResourceToGet = newFileEntryResource().build(); fileSetter.accept(fileToGet); when(fileServiceMock.getFileByFileEntryId(fileToGet.getId())).thenReturn(serviceSuccess(inputStreamSupplier)); when(fileEntryMapperMock.mapToResource(fileToGet)).thenReturn(fileResourceToGet); ServiceResult<FileAndContents> result = getFileContentsFn.get(); assertTrue(result.isSuccess()); assertEquals(fileResourceToGet, result.getSuccessObject().getFileEntry()); assertEquals(inputStreamSupplier, result.getSuccessObject().getContentsSupplier()); } private void assertCreateFile(Supplier<FileEntry> fileGetter, BiFunction<FileEntryResource, Supplier<InputStream>, ServiceResult<FileEntryResource>> createFileFn) { FileEntryResource fileToCreate = newFileEntryResource().build(); Supplier<InputStream> inputStreamSupplier = () -> null; FileEntry createdFile = newFileEntry().build(); FileEntryResource createdFileResource = newFileEntryResource().build(); when(fileServiceMock.createFile(fileToCreate, inputStreamSupplier)).thenReturn(serviceSuccess(Pair.of(new File("blah"), createdFile))); when(fileEntryMapperMock.mapToResource(createdFile)).thenReturn(createdFileResource); ServiceResult<FileEntryResource> result = createFileFn.apply(fileToCreate, inputStreamSupplier); assertTrue(result.isSuccess()); assertEquals(createdFileResource, result.getSuccessObject()); assertEquals(createdFile, fileGetter.get()); } private void assertGetFileDetails(Consumer<FileEntry> fileSetter, Supplier<ServiceResult<FileEntryResource>> getFileDetailsFn) { FileEntry fileToGet = newFileEntry().build(); FileEntryResource fileResourceToGet = newFileEntryResource().build(); fileSetter.accept(fileToGet); when(fileEntryMapperMock.mapToResource(fileToGet)).thenReturn(fileResourceToGet); ServiceResult<FileEntryResource> result = getFileDetailsFn.get(); assertTrue(result.isSuccess()); assertEquals(fileResourceToGet, result.getSuccessObject()); } private void assertDeleteFile(Supplier<FileEntry> fileGetter, Consumer<FileEntry> fileSetter, Supplier<ServiceResult<Void>> deleteFileFn) { FileEntry fileToDelete = newFileEntry().build(); fileSetter.accept(fileToDelete); when(fileServiceMock.deleteFile(fileToDelete.getId())).thenReturn(serviceSuccess(fileToDelete)); ServiceResult<Void> result = deleteFileFn.get(); assertTrue(result.isSuccess()); assertNull(fileGetter.get()); 
verify(fileServiceMock).deleteFile(fileToDelete.getId()); } private void assertUpdateFile(Supplier<FileEntry> fileGetter, BiFunction<FileEntryResource, Supplier<InputStream>, ServiceResult<Void>> updateFileFn) { FileEntryResource fileToUpdate = newFileEntryResource().build(); Supplier<InputStream> inputStreamSupplier = () -> null; FileEntry updatedFile = newFileEntry().build(); FileEntryResource updatedFileResource = newFileEntryResource().build(); when(fileServiceMock.updateFile(fileToUpdate, inputStreamSupplier)).thenReturn(serviceSuccess(Pair.of(new File("blah"), updatedFile))); when(fileEntryMapperMock.mapToResource(updatedFile)).thenReturn(updatedFileResource); ServiceResult<Void> result = updateFileFn.apply(fileToUpdate, inputStreamSupplier); assertTrue(result.isSuccess()); assertEquals(updatedFile, fileGetter.get()); verify(fileServiceMock).updateFile(fileToUpdate, inputStreamSupplier); } private Project createProjectExpectationsFromOriginalApplication(Application application) { assertFalse(application.getProcessRoles().isEmpty()); return createLambdaMatcher(project -> { assertEquals(application.getName(), project.getName()); assertEquals(application.getDurationInMonths(), project.getDurationInMonths()); assertEquals(application.getStartDate(), project.getTargetStartDate()); assertFalse(project.getProjectUsers().isEmpty()); assertNull(project.getAddress()); List<ProcessRole> collaborativeRoles = simpleFilter(application.getProcessRoles(), ProcessRole::isLeadApplicantOrCollaborator); assertEquals(collaborativeRoles.size(), project.getProjectUsers().size()); collaborativeRoles.forEach(processRole -> { List<ProjectUser> matchingProjectUser = simpleFilter(project.getProjectUsers(), projectUser -> projectUser.getOrganisation().equals(processRole.getOrganisation()) && projectUser.getUser().equals(processRole.getUser())); assertEquals(1, matchingProjectUser.size()); assertEquals(PARTNER.getName(), matchingProjectUser.get(0).getRole().getName()); assertEquals(project, matchingProjectUser.get(0).getProject()); }); }); } @Override protected ProjectService supplyServiceUnderTest() { return new ProjectServiceImpl(); } }
ifs-data-service/src/test/java/com/worth/ifs/project/transactional/ProjectServiceImplTest.java
package com.worth.ifs.project.transactional;

import com.worth.ifs.BaseServiceUnitTest;
import com.worth.ifs.address.domain.Address;
import com.worth.ifs.address.domain.AddressType;
import com.worth.ifs.address.resource.AddressResource;
import com.worth.ifs.application.domain.Application;
import com.worth.ifs.commons.error.CommonFailureKeys;
import com.worth.ifs.commons.service.ServiceResult;
import com.worth.ifs.file.domain.FileEntry;
import com.worth.ifs.file.resource.FileEntryResource;
import com.worth.ifs.file.service.FileAndContents;
import com.worth.ifs.organisation.domain.OrganisationAddress;
import com.worth.ifs.project.builder.MonitoringOfficerBuilder;
import com.worth.ifs.project.domain.MonitoringOfficer;
import com.worth.ifs.project.domain.Project;
import com.worth.ifs.project.domain.ProjectUser;
import com.worth.ifs.project.resource.MonitoringOfficerResource;
import com.worth.ifs.project.resource.ProjectResource;
import com.worth.ifs.project.resource.ProjectUserResource;
import com.worth.ifs.user.domain.Organisation;
import com.worth.ifs.user.domain.ProcessRole;
import com.worth.ifs.user.domain.Role;
import com.worth.ifs.user.domain.User;
import com.worth.ifs.user.resource.UserRoleType;
import org.apache.commons.lang3.tuple.Pair;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;

import java.io.File;
import java.io.InputStream;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.function.BiFunction;
import java.util.function.Consumer;
import java.util.function.Supplier;

import static com.worth.ifs.LambdaMatcher.createLambdaMatcher;
import static com.worth.ifs.address.builder.AddressBuilder.newAddress;
import static com.worth.ifs.address.builder.AddressResourceBuilder.newAddressResource;
import static com.worth.ifs.address.builder.AddressTypeBuilder.newAddressType;
import static com.worth.ifs.address.resource.OrganisationAddressType.*;
import static com.worth.ifs.application.builder.ApplicationBuilder.newApplication;
import static com.worth.ifs.commons.error.CommonErrors.notFoundError;
import static com.worth.ifs.commons.error.CommonFailureKeys.*;
import static com.worth.ifs.commons.service.ServiceResult.serviceSuccess;
import static com.worth.ifs.file.domain.builders.FileEntryBuilder.newFileEntry;
import static com.worth.ifs.file.resource.builders.FileEntryResourceBuilder.newFileEntryResource;
import static com.worth.ifs.organisation.builder.OrganisationAddressBuilder.newOrganisationAddress;
import static com.worth.ifs.project.builder.MonitoringOfficerResourceBuilder.newMonitoringOfficerResource;
import static com.worth.ifs.project.builder.ProjectBuilder.newProject;
import static com.worth.ifs.project.builder.ProjectResourceBuilder.newProjectResource;
import static com.worth.ifs.project.builder.ProjectUserBuilder.newProjectUser;
import static com.worth.ifs.project.builder.ProjectUserResourceBuilder.newProjectUserResource;
import static com.worth.ifs.user.builder.OrganisationBuilder.newOrganisation;
import static com.worth.ifs.user.builder.ProcessRoleBuilder.newProcessRole;
import static com.worth.ifs.user.builder.RoleBuilder.newRole;
import static com.worth.ifs.user.builder.UserBuilder.newUser;
import static com.worth.ifs.user.resource.UserRoleType.*;
import static com.worth.ifs.util.CollectionFunctions.simpleFilter;
import static java.util.Arrays.asList;
import static java.util.Collections.emptyList;
import static java.util.Collections.singletonList;
import
static org.junit.Assert.*;
import static org.mockito.Mockito.*;

public class ProjectServiceImplTest extends BaseServiceUnitTest<ProjectService> {

    private Long projectId = 123L;
    private Long applicationId = 456L;
    private Long userId = 7L;
    private Long otherUserId = 8L;

    private Application application;
    private Organisation organisation;
    private Role leadApplicantRole;
    private Role projectManagerRole;
    private Role partnerRole;
    private User user;
    private ProcessRole leadApplicantProcessRole;
    private ProjectUser leadPartnerProjectUser;
    private Project project;
    private MonitoringOfficerResource monitoringOfficerResource;

    @Before
    public void setUp() {

        organisation = newOrganisation().build();

        leadApplicantRole = newRole(LEADAPPLICANT).build();
        projectManagerRole = newRole(PROJECT_MANAGER).build();
        partnerRole = newRole(PARTNER).build();

        user = newUser().
                withid(userId).
                build();

        leadApplicantProcessRole = newProcessRole().
                withOrganisation(organisation).
                withRole(leadApplicantRole).
                withUser(user).
                build();

        leadPartnerProjectUser = newProjectUser().
                withOrganisation(organisation).
                withRole(partnerRole).
                withUser(user).
                build();

        application = newApplication().
                withId(applicationId).
                withProcessRoles(leadApplicantProcessRole).
                withName("My Application").
                withDurationInMonths(5L).
                withStartDate(LocalDate.of(2017, 3, 2)).
                build();

        project = newProject().
                withId(projectId).
                withApplication(application).
                withProjectUsers(singletonList(leadPartnerProjectUser)).
                build();

        monitoringOfficerResource = newMonitoringOfficerResource()
                .withProject(1L)
                .withFirstName("abc")
                .withLastName("xyz")
                .withEmail("[email protected]")
                .withPhoneNumber("078323455")
                .build();

        when(applicationRepositoryMock.findOne(applicationId)).thenReturn(application);
        when(projectRepositoryMock.findOne(projectId)).thenReturn(project);
    }

    @Test
    public void testCreateProjectFromApplication() {

        Role partnerRole = newRole().withType(PARTNER).build();
        ProjectResource newProjectResource = newProjectResource().build();

        when(applicationRepositoryMock.findOne(applicationId)).thenReturn(application);

        Project savedProject = newProject().build();

        when(roleRepositoryMock.findOneByName(PARTNER.getName())).thenReturn(partnerRole);

        Project newProjectExpectations = createProjectExpectationsFromOriginalApplication(application);
        when(projectRepositoryMock.save(newProjectExpectations)).thenReturn(savedProject);
        when(projectMapperMock.mapToResource(savedProject)).thenReturn(newProjectResource);

        ServiceResult<ProjectResource> project = service.createProjectFromApplication(applicationId);
        assertTrue(project.isSuccess());
        assertEquals(newProjectResource, project.getSuccessObject());
    }

    @Test
    public void testInvalidProjectManagerProvided() {

        ServiceResult<Void> result = service.setProjectManager(projectId, otherUserId);

        assertFalse(result.isSuccess());
        assertTrue(result.getFailure().is(PROJECT_SETUP_PROJECT_MANAGER_MUST_BE_LEAD_PARTNER));
    }

    @Test
    public void testSetProjectManagerWhenProjectDetailsAlreadySubmitted() {

        Project existingProject = newProject().withSubmittedDate(LocalDateTime.now()).build();

        assertTrue(existingProject.getProjectUsers().isEmpty());

        when(projectRepositoryMock.findOne(projectId)).thenReturn(existingProject);

        ServiceResult<Void> result = service.setProjectManager(projectId, userId);

        assertTrue(result.isFailure());
        assertTrue(result.getFailure().is(PROJECT_SETUP_PROJECT_DETAILS_CANNOT_BE_UPDATED_IF_ALREADY_SUBMITTED));
        assertTrue(existingProject.getProjectUsers().isEmpty());
    }

    @Test
    public void testValidProjectManagerProvided() {
        when(roleRepositoryMock.findOneByName(PROJECT_MANAGER.getName())).thenReturn(projectManagerRole);

        ServiceResult<Void> result = service.setProjectManager(projectId, userId);
        assertTrue(result.isSuccess());

        ProjectUser expectedProjectManager = newProjectUser().
                withId().
                withProject(project).
                withOrganisation(organisation).
                withRole(projectManagerRole).
                withUser(user).
                build();

        assertEquals(expectedProjectManager, project.getProjectUsers().get(project.getProjectUsers().size() - 1));
    }

    @Test
    public void testValidProjectManagerProvidedWithExistingProjectManager() {

        User differentUser = newUser().build();
        Organisation differentOrganisation = newOrganisation().build();

        @SuppressWarnings("unused")
        ProjectUser existingProjectManager = newProjectUser().
                withId(456L).
                withProject(project).
                withRole(projectManagerRole).
                withOrganisation(differentOrganisation).
                withUser(differentUser).
                build();

        when(roleRepositoryMock.findOneByName(PROJECT_MANAGER.getName())).thenReturn(projectManagerRole);

        ServiceResult<Void> result = service.setProjectManager(projectId, userId);
        assertTrue(result.isSuccess());

        ProjectUser expectedProjectManager = newProjectUser().
                withId(456L).
                withProject(project).
                withOrganisation(organisation).
                withRole(projectManagerRole).
                withUser(user).
                build();

        assertEquals(expectedProjectManager, project.getProjectUsers().get(project.getProjectUsers().size() - 1));
    }

    @Test
    public void testUpdateProjectStartDate() {

        LocalDate now = LocalDate.now();
        LocalDate validDate = LocalDate.of(now.getYear(), now.getMonthValue(), 1).plusMonths(1);

        Project existingProject = newProject().build();
        assertNull(existingProject.getTargetStartDate());

        when(projectRepositoryMock.findOne(123L)).thenReturn(existingProject);

        ServiceResult<Void> updateResult = service.updateProjectStartDate(123L, validDate);
        assertTrue(updateResult.isSuccess());

        verify(projectRepositoryMock).findOne(123L);
        assertEquals(validDate, existingProject.getTargetStartDate());
    }

    @Test
    public void testUpdateProjectStartDateButProjectDoesntExist() {

        LocalDate now = LocalDate.now();
        LocalDate validDate = LocalDate.of(now.getYear(), now.getMonthValue(), 1).plusMonths(1);

        when(projectRepositoryMock.findOne(123L)).thenReturn(null);

        ServiceResult<Void> updateResult = service.updateProjectStartDate(123L, validDate);
        assertTrue(updateResult.isFailure());
        assertTrue(updateResult.getFailure().is(notFoundError(Project.class, 123L)));
    }

    @Test
    public void testUpdateProjectStartDateButStartDateDoesntBeginOnFirstDayOfMonth() {

        LocalDate now = LocalDate.now();
        LocalDate dateNotOnFirstDayOfMonth = LocalDate.of(now.getYear(), now.getMonthValue(), 2).plusMonths(1);

        Project existingProject = newProject().build();
        assertNull(existingProject.getTargetStartDate());

        when(projectRepositoryMock.findOne(123L)).thenReturn(existingProject);

        ServiceResult<Void> updateResult = service.updateProjectStartDate(123L, dateNotOnFirstDayOfMonth);
        assertTrue(updateResult.isFailure());
        assertTrue(updateResult.getFailure().is(PROJECT_SETUP_DATE_MUST_START_ON_FIRST_DAY_OF_MONTH));

        verify(projectRepositoryMock, never()).findOne(123L);
        assertNull(existingProject.getTargetStartDate());
    }

    @Test
    public void testUpdateProjectStartDateButStartDateNotInFuture() {

        LocalDate now = LocalDate.now();
        LocalDate pastDate = LocalDate.of(now.getYear(), now.getMonthValue(), 1).minusMonths(1);

        Project existingProject = newProject().build();
        assertNull(existingProject.getTargetStartDate());

        when(projectRepositoryMock.findOne(123L)).thenReturn(existingProject);

        ServiceResult<Void> updateResult =
service.updateProjectStartDate(123L, pastDate); assertTrue(updateResult.isFailure()); assertTrue(updateResult.getFailure().is(PROJECT_SETUP_DATE_MUST_BE_IN_THE_FUTURE)); verify(projectRepositoryMock, never()).findOne(123L); assertNull(existingProject.getTargetStartDate()); } @Test public void testUpdateProjectStartDateWhenProjectDetailsAlreadySubmitted() { LocalDate now = LocalDate.now(); LocalDate validDate = LocalDate.of(now.getYear(), now.getMonthValue(), 1).plusMonths(1); Project existingProject = newProject().withSubmittedDate(LocalDateTime.now()).build(); assertNull(existingProject.getTargetStartDate()); assertNotNull(existingProject.getSubmittedDate()); when(projectRepositoryMock.findOne(123L)).thenReturn(existingProject); ServiceResult<Void> updateResult = service.updateProjectStartDate(123L, validDate); assertTrue(updateResult.isFailure()); assertTrue(updateResult.getFailure().is(PROJECT_SETUP_PROJECT_DETAILS_CANNOT_BE_UPDATED_IF_ALREADY_SUBMITTED)); verify(projectRepositoryMock).findOne(123L); assertNull(existingProject.getTargetStartDate()); } @Test public void testUpdateFinanceContact() { Project project = newProject().withId(123L).build(); Organisation organisation = newOrganisation().withId(5L).build(); User user = newUser().withid(7L).build(); Role partnerRole = newRole().withType(PARTNER).build(); newProjectUser().withOrganisation(organisation).withUser(user).withProject(project).withRole(partnerRole).build(); Role financeContactRole = newRole().withType(FINANCE_CONTACT).build(); when(projectRepositoryMock.findOne(123L)).thenReturn(project); when(roleRepositoryMock.findOneByName(FINANCE_CONTACT.getName())).thenReturn(financeContactRole); ServiceResult<Void> updateResult = service.updateFinanceContact(123L, 5L, 7L); assertTrue(updateResult.isSuccess()); List<ProjectUser> foundFinanceContacts = simpleFilter(project.getProjectUsers(), projectUser -> projectUser.getOrganisation().equals(organisation) && projectUser.getUser().equals(user) && projectUser.getProject().equals(project) && projectUser.getRole().equals(financeContactRole)); assertEquals(1, foundFinanceContacts.size()); } @Test public void testUpdateFinanceContactWithExistingFinanceContactChosenForSameOrganisation() { Role partnerRole = newRole().withType(PARTNER).build(); Role financeContactRole = newRole().withType(FINANCE_CONTACT).build(); Project project = newProject().withId(123L).build(); Organisation organisation = newOrganisation().withId(5L).build(); User newFinanceContactUser = newUser().withid(7L).build(); newProjectUser().withOrganisation(organisation).withUser(newFinanceContactUser).withProject(project).withRole(partnerRole).build(); User existingFinanceContactUser = newUser().withid(9999L).build(); newProjectUser().withOrganisation(organisation).withUser(existingFinanceContactUser).withProject(project).withRole(partnerRole).build(); newProjectUser().withOrganisation(organisation).withUser(existingFinanceContactUser).withProject(project).withRole(financeContactRole).build(); when(projectRepositoryMock.findOne(123L)).thenReturn(project); when(roleRepositoryMock.findOneByName(FINANCE_CONTACT.getName())).thenReturn(financeContactRole); List<ProjectUser> existingFinanceContactForOrganisation = simpleFilter(project.getProjectUsers(), projectUser -> projectUser.getOrganisation().equals(organisation) && projectUser.getProject().equals(project) && projectUser.getRole().equals(financeContactRole)); assertEquals(1, existingFinanceContactForOrganisation.size()); ServiceResult<Void> updateResult = 
service.updateFinanceContact(123L, 5L, 7L); assertTrue(updateResult.isSuccess()); List<ProjectUser> foundFinanceContacts = simpleFilter(project.getProjectUsers(), projectUser -> projectUser.getOrganisation().equals(organisation) && projectUser.getUser().equals(newFinanceContactUser) && projectUser.getProject().equals(project) && projectUser.getRole().equals(financeContactRole)); assertEquals(1, foundFinanceContacts.size()); } @Test public void testUpdateFinanceContactButUserIsNotExistingPartner() { Role projectManagerRole = newRole().withType(PROJECT_MANAGER).build(); Project project = newProject().withId(123L).build(); Organisation organisation = newOrganisation().withId(5L).build(); User user = newUser().withid(7L).build(); newProjectUser().withOrganisation(organisation).withUser(user).withProject(project).withRole(projectManagerRole).build(); when(projectRepositoryMock.findOne(123L)).thenReturn(project); ServiceResult<Void> updateResult = service.updateFinanceContact(123L, 5L, 7L); assertTrue(updateResult.isFailure()); assertTrue(updateResult.getFailure().is(PROJECT_SETUP_FINANCE_CONTACT_MUST_BE_A_PARTNER_ON_THE_PROJECT_FOR_THE_ORGANISATION)); verify(processRoleRepositoryMock, never()).save(isA(ProcessRole.class)); } @Test public void testUpdateFinanceContactWhenNotPresentOnTheProject() { long userIdForUserNotOnProject = 6L; Role partnerRole = newRole().withType(PARTNER).build(); Project existingProject = newProject().withId(123L).build(); Project anotherProject = newProject().withId(9999L).build(); when(projectRepositoryMock.findOne(123L)).thenReturn(existingProject); Organisation organisation = newOrganisation().withId(5L).build(); User user = newUser().withid(7L).build(); newProjectUser().withOrganisation(organisation).withUser(user).withProject(anotherProject).withRole(partnerRole).build(); ServiceResult<Void> updateResult = service.updateFinanceContact(123L, 5L, userIdForUserNotOnProject); assertTrue(updateResult.isFailure()); assertTrue(updateResult.getFailure().is(PROJECT_SETUP_FINANCE_CONTACT_MUST_BE_A_USER_ON_THE_PROJECT_FOR_THE_ORGANISATION)); } @Test public void testUpdateFinanceContactWhenProjectDetailsAlreadySubmitted() { Project project = newProject().withId(123L).withSubmittedDate(LocalDateTime.now()).build(); assertTrue(project.getProjectUsers().isEmpty()); when(projectRepositoryMock.findOne(123L)).thenReturn(project); ServiceResult<Void> updateResult = service.updateFinanceContact(123L, 5L, 7L); assertTrue(updateResult.isFailure()); assertTrue(updateResult.getFailure().is(PROJECT_SETUP_PROJECT_DETAILS_CANNOT_BE_UPDATED_IF_ALREADY_SUBMITTED)); verify(projectRepositoryMock).findOne(123L); assertTrue(project.getProjectUsers().isEmpty()); } @Test public void testFindByUserIdReturnsOnlyDistinctProjects() { Project project = newProject().withId(123L).build(); Organisation organisation = newOrganisation().withId(5L).build(); User user = newUser().withid(7L).build(); Role partnerRole = newRole().withType(PARTNER).build(); Role financeContactRole = newRole().withType(FINANCE_CONTACT).build(); ProjectUser projectUserWithPartnerRole = newProjectUser().withOrganisation(organisation).withUser(user).withProject(project).withRole(partnerRole).build(); ProjectUser projectUserWithFinanceRole = newProjectUser().withOrganisation(organisation).withUser(user).withProject(project).withRole(financeContactRole).build(); List<ProjectUser> projectUserRecords = asList(projectUserWithPartnerRole, projectUserWithFinanceRole); ProjectResource projectResource = 
newProjectResource().withId(project.getId()).build(); when(projectUserRepositoryMock.findByUserId(user.getId())).thenReturn(projectUserRecords); when(projectMapperMock.mapToResource(project)).thenReturn(projectResource); ServiceResult<List<ProjectResource>> result = service.findByUserId(user.getId()); assertTrue(result.isSuccess()); assertEquals(result.getSuccessObject().size(), 1L); } @Test public void testUpdateProjectAddressToBeRegisteredAddress() { Project project = newProject().withId(1L).build(); Organisation leadOrganisation = newOrganisation().withId(1L).build(); AddressResource existingRegisteredAddressResource = newAddressResource().build(); Address registeredAddress = newAddress().build(); when(projectRepositoryMock.findOne(project.getId())).thenReturn(project); when(organisationRepositoryMock.findOne(organisation.getId())).thenReturn(organisation); when(addressRepositoryMock.exists(existingRegisteredAddressResource.getId())).thenReturn(true); when(addressRepositoryMock.findOne(existingRegisteredAddressResource.getId())).thenReturn(registeredAddress); ServiceResult<Void> result = service.updateProjectAddress(leadOrganisation.getId(), project.getId(), REGISTERED, existingRegisteredAddressResource); assertTrue(result.isSuccess()); } @Test public void testUpdateProjectAddressToBeOperatingAddress() { Project project = newProject().withId(1L).build(); Organisation leadOrganisation = newOrganisation().withId(1L).build(); AddressResource existingOperatingAddressResource = newAddressResource().build(); Address operatingAddress = newAddress().build(); when(projectRepositoryMock.findOne(project.getId())).thenReturn(project); when(organisationRepositoryMock.findOne(organisation.getId())).thenReturn(organisation); when(addressRepositoryMock.exists(existingOperatingAddressResource.getId())).thenReturn(true); when(addressRepositoryMock.findOne(existingOperatingAddressResource.getId())).thenReturn(operatingAddress); ServiceResult<Void> result = service.updateProjectAddress(leadOrganisation.getId(), project.getId(), OPERATING, existingOperatingAddressResource); assertTrue(result.isSuccess()); } @Test public void testUpdateProjectAddressToNewProjectAddress() { Project project = newProject().withId(1L).build(); Organisation leadOrganisation = newOrganisation().withId(1L).build(); AddressResource newAddressResource = newAddressResource().build(); Address newAddress = newAddress().build(); AddressType projectAddressType = newAddressType().withId((long) PROJECT.getOrdinal()).withName(PROJECT.name()).build(); OrganisationAddress organisationAddress = newOrganisationAddress().withOrganisation(leadOrganisation).withAddress(newAddress).withAddressType(projectAddressType).build(); when(projectRepositoryMock.findOne(project.getId())).thenReturn(project); when(organisationRepositoryMock.findOne(organisation.getId())).thenReturn(organisation); when(addressRepositoryMock.exists(newAddressResource.getId())).thenReturn(false); when(addressMapperMock.mapToDomain(newAddressResource)).thenReturn(newAddress); when(addressTypeRepositoryMock.findOne((long) PROJECT.getOrdinal())).thenReturn(projectAddressType); when(organisationAddressRepositoryMock.findByOrganisationIdAndAddressType(leadOrganisation.getId(), projectAddressType)).thenReturn(emptyList()); when(organisationAddressRepositoryMock.save(organisationAddress)).thenReturn(organisationAddress); ServiceResult<Void> result = service.updateProjectAddress(leadOrganisation.getId(), project.getId(), PROJECT, newAddressResource); assertTrue(result.isSuccess()); } 
@Test public void testSaveProjectSubmitDateTimeIsSuccessfulWhenAllProjectDetailsHaveBeenProvided() { Organisation organisation1 = newOrganisation().build(); Organisation organisation2 = newOrganisation().build(); Organisation organisation3 = newOrganisation().build(); Role projectManagerRole = newRole().withType(PROJECT_MANAGER).build(); ProjectUser projectManagerProjectUser = newProjectUser().withRole(projectManagerRole).build(); Address address = newAddress().build(); Project project = newProject().withId(1L).withAddress(address).withProjectUsers(singletonList(projectManagerProjectUser)).withTargetStartDate(LocalDate.now()).build(); Role financeContactRole = newRole().withType(FINANCE_CONTACT).build(); Role partnerRole = newRole().withType(PARTNER).build(); List<ProjectUser> projectUserObjs; ProjectUser projectUser1WithPartnerRole = newProjectUser().withProject(project).withOrganisation(organisation1).withRole(partnerRole).build(); ProjectUser projectUser1WithFinanceRole = newProjectUser().withProject(project).withOrganisation(organisation1).withRole(financeContactRole).build(); ProjectUser projectUser2WithPartnerRole = newProjectUser().withProject(project).withOrganisation(organisation2).withRole(partnerRole).build(); ProjectUser projectUser2WithFinanceRole = newProjectUser().withProject(project).withOrganisation(organisation2).withRole(financeContactRole).build(); ProjectUser projectUser3WithPartnerRole = newProjectUser().withProject(project).withOrganisation(organisation3).withRole(partnerRole).build(); ProjectUser projectUser3WithFinanceRole = newProjectUser().withProject(project).withOrganisation(organisation3).withRole(financeContactRole).build(); ProjectUserResource projectUser1WithPartnerRoleResource = newProjectUserResource().withProject(project.getId()).withOrganisation(organisation1.getId()).withRole(partnerRole.getId()).withRoleName(PARTNER.getName()).build(); ProjectUserResource projectUser1WithFinanceRoleResource = newProjectUserResource().withProject(project.getId()).withOrganisation(organisation1.getId()).withRole(financeContactRole.getId()).withRoleName(FINANCE_CONTACT.getName()).build(); ProjectUserResource projectUser2WithPartnerRoleResource = newProjectUserResource().withProject(project.getId()).withOrganisation(organisation2.getId()).withRole(partnerRole.getId()).withRoleName(PARTNER.getName()).build(); ProjectUserResource projectUser2WithFinanceRoleResource = newProjectUserResource().withProject(project.getId()).withOrganisation(organisation2.getId()).withRole(financeContactRole.getId()).withRoleName(FINANCE_CONTACT.getName()).build(); ProjectUserResource projectUser3WithPartnerRoleResource = newProjectUserResource().withProject(project.getId()).withOrganisation(organisation3.getId()).withRole(partnerRole.getId()).withRoleName(PARTNER.getName()).build(); ProjectUserResource projectUser3WithFinanceRoleResource = newProjectUserResource().withProject(project.getId()).withOrganisation(organisation3.getId()).withRole(partnerRole.getId()).withRoleName(FINANCE_CONTACT.getName()).build(); ProjectUserResource projectManagerProjectUserResource = newProjectUserResource().withProject(project.getId()).withOrganisation(organisation3.getId()).withRole(projectManagerRole.getId()).withRoleName(PROJECT_MANAGER.getName()).build(); projectUserObjs = asList(projectManagerProjectUser, projectUser1WithPartnerRole, projectUser1WithFinanceRole, projectUser2WithPartnerRole, projectUser2WithFinanceRole, projectUser3WithPartnerRole, projectUser3WithFinanceRole); 
when(projectRepositoryMock.findOne(1L)).thenReturn(project); when(projectUserRepositoryMock.findByProjectId(1L)).thenReturn(projectUserObjs); when(organisationRepositoryMock.findOne(organisation1.getId())).thenReturn(organisation1); when(organisationRepositoryMock.findOne(organisation2.getId())).thenReturn(organisation2); when(organisationRepositoryMock.findOne(organisation3.getId())).thenReturn(organisation3); when(projectUserMapperMock.mapToResource(projectUser1WithFinanceRole)).thenReturn(projectUser1WithFinanceRoleResource); when(projectUserMapperMock.mapToResource(projectUser1WithPartnerRole)).thenReturn(projectUser1WithPartnerRoleResource); when(projectUserMapperMock.mapToResource(projectUser2WithPartnerRole)).thenReturn(projectUser2WithPartnerRoleResource); when(projectUserMapperMock.mapToResource(projectUser2WithFinanceRole)).thenReturn(projectUser2WithFinanceRoleResource); when(projectUserMapperMock.mapToResource(projectUser3WithPartnerRole)).thenReturn(projectUser3WithPartnerRoleResource); when(projectUserMapperMock.mapToResource(projectUser3WithFinanceRole)).thenReturn(projectUser3WithFinanceRoleResource); when(projectUserMapperMock.mapToResource(projectManagerProjectUser)).thenReturn(projectManagerProjectUserResource); ServiceResult result = service.saveProjectSubmitDateTime(1L, LocalDateTime.now()); assertTrue(result.isSuccess()); } @Test public void testSaveProjectSubmitDateTimeIsUnSuccessfulWhenAFinanceContactIsMissing() { Organisation organisation1 = newOrganisation().build(); Organisation organisation2 = newOrganisation().build(); Organisation organisation3 = newOrganisation().build(); Role projectManagerRole = newRole().withType(PROJECT_MANAGER).build(); ProjectUser projectManagerProjectUser = newProjectUser().withRole(projectManagerRole).build(); Address address = newAddress().build(); Project project = newProject().withId(1L).withAddress(address).withProjectUsers(singletonList(projectManagerProjectUser)).withTargetStartDate(LocalDate.now()).build(); Role financeContactRole = newRole().withType(FINANCE_CONTACT).build(); Role partnerRole = newRole().withType(PARTNER).build(); List<ProjectUser> projectUserObjs; ProjectUser projectUser1WithPartnerRole = newProjectUser().withProject(project).withOrganisation(organisation1).withRole(partnerRole).build(); ProjectUser projectUser1WithFinanceRole = newProjectUser().withProject(project).withOrganisation(organisation1).withRole(financeContactRole).build(); ProjectUser projectUser2WithPartnerRole = newProjectUser().withProject(project).withOrganisation(organisation2).withRole(partnerRole).build(); ProjectUser projectUser2WithFinanceRole = newProjectUser().withProject(project).withOrganisation(organisation2).withRole(financeContactRole).build(); ProjectUser projectUserWithPartnerRole = newProjectUser().withProject(project).withOrganisation(organisation3).withRole(partnerRole).build(); ProjectUserResource projectUser1WithPartnerRoleResource = newProjectUserResource().withProject(project.getId()).withOrganisation(organisation1.getId()).withRole(partnerRole.getId()).withRoleName(PARTNER.getName()).build(); ProjectUserResource projectUser1WithFinanceRoleResource = newProjectUserResource().withProject(project.getId()).withOrganisation(organisation1.getId()).withRole(financeContactRole.getId()).withRoleName(FINANCE_CONTACT.getName()).build(); ProjectUserResource projectUser2WithPartnerRoleResource = 
newProjectUserResource().withProject(project.getId()).withOrganisation(organisation2.getId()).withRole(partnerRole.getId()).withRoleName(PARTNER.getName()).build(); ProjectUserResource projectUser2WithFinanceRoleResource = newProjectUserResource().withProject(project.getId()).withOrganisation(organisation2.getId()).withRole(financeContactRole.getId()).withRoleName(FINANCE_CONTACT.getName()).build(); ProjectUserResource projectUserWithPartnerRoleResource = newProjectUserResource().withProject(project.getId()).withOrganisation(organisation3.getId()).withRole(partnerRole.getId()).withRoleName(PARTNER.getName()).build(); projectUserObjs = asList(projectUser1WithPartnerRole, projectUser1WithFinanceRole, projectUser2WithPartnerRole, projectUser2WithFinanceRole, projectUserWithPartnerRole); when(projectRepositoryMock.findOne(1L)).thenReturn(project); when(projectUserRepositoryMock.findByProjectId(1L)).thenReturn(projectUserObjs); when(organisationRepositoryMock.findOne(organisation1.getId())).thenReturn(organisation1); when(organisationRepositoryMock.findOne(organisation2.getId())).thenReturn(organisation2); when(organisationRepositoryMock.findOne(organisation3.getId())).thenReturn(organisation3); when(projectUserMapperMock.mapToResource(projectUser1WithFinanceRole)).thenReturn(projectUser1WithFinanceRoleResource); when(projectUserMapperMock.mapToResource(projectUser1WithPartnerRole)).thenReturn(projectUser1WithPartnerRoleResource); when(projectUserMapperMock.mapToResource(projectUser2WithPartnerRole)).thenReturn(projectUser2WithPartnerRoleResource); when(projectUserMapperMock.mapToResource(projectUser2WithFinanceRole)).thenReturn(projectUser2WithFinanceRoleResource); when(projectUserMapperMock.mapToResource(projectUserWithPartnerRole)).thenReturn(projectUserWithPartnerRoleResource); ServiceResult<Void> result = service.saveProjectSubmitDateTime(1L, LocalDateTime.now()); assertTrue(result.isFailure()); assertTrue(result.getFailure().is(PROJECT_SETUP_PROJECT_DETAILS_CANNOT_BE_SUBMITTED_IF_INCOMPLETE)); } @Test public void testCannotSaveProjectSubmitDateTimeWhenNotAllProjectDetailsHaveBeenProvided() { Organisation organisation1 = newOrganisation().build(); Organisation organisation2 = newOrganisation().build(); Organisation organisation3 = newOrganisation().build(); Role projectManagerRole = newRole().withType(PROJECT_MANAGER).build(); ProjectUser projectManagerProjectUser = newProjectUser().withRole(projectManagerRole).build(); Address address = newAddress().build(); Project project = newProject().withId(1L).withAddress(address).withProjectUsers(singletonList(projectManagerProjectUser)).build(); Role financeContactRole = newRole().withType(FINANCE_CONTACT).build(); Role partnerRole = newRole().withType(PARTNER).build(); List<ProjectUser> projectUserObjs; ProjectUser projectUser1WithPartnerRole = newProjectUser().withProject(project).withOrganisation(organisation1).withRole(partnerRole).build(); ProjectUser projectUser1WithFinanceRole = newProjectUser().withProject(project).withOrganisation(organisation1).withRole(financeContactRole).build(); ProjectUser projectUser2WithPartnerRole = newProjectUser().withProject(project).withOrganisation(organisation2).withRole(partnerRole).build(); ProjectUser projectUser2WithFinanceRole = newProjectUser().withProject(project).withOrganisation(organisation2).withRole(financeContactRole).build(); ProjectUser projectUserWithPartnerRole = newProjectUser().withProject(project).withOrganisation(organisation3).withRole(partnerRole).build(); ProjectUserResource 
projectUser1WithPartnerRoleResource = newProjectUserResource().withProject(project.getId()).withOrganisation(organisation1.getId()).withRole(partnerRole.getId()).withRoleName(PARTNER.getName()).build(); ProjectUserResource projectUser1WithFinanceRoleResource = newProjectUserResource().withProject(project.getId()).withOrganisation(organisation1.getId()).withRole(financeContactRole.getId()).withRoleName(FINANCE_CONTACT.getName()).build(); ProjectUserResource projectUser2WithPartnerRoleResource = newProjectUserResource().withProject(project.getId()).withOrganisation(organisation2.getId()).withRole(partnerRole.getId()).withRoleName(PARTNER.getName()).build(); ProjectUserResource projectUser2WithFinanceRoleResource = newProjectUserResource().withProject(project.getId()).withOrganisation(organisation2.getId()).withRole(financeContactRole.getId()).withRoleName(FINANCE_CONTACT.getName()).build(); ProjectUserResource projectUserWithPartnerRoleResource = newProjectUserResource().withProject(project.getId()).withOrganisation(organisation3.getId()).withRole(partnerRole.getId()).withRoleName(PARTNER.getName()).build(); projectUserObjs = asList(projectUser1WithPartnerRole, projectUser1WithFinanceRole, projectUser2WithPartnerRole, projectUser2WithFinanceRole, projectUserWithPartnerRole); when(projectRepositoryMock.findOne(1L)).thenReturn(project); when(projectUserRepositoryMock.findByProjectId(1L)).thenReturn(projectUserObjs); when(organisationRepositoryMock.findOne(organisation1.getId())).thenReturn(organisation1); when(organisationRepositoryMock.findOne(organisation2.getId())).thenReturn(organisation2); when(organisationRepositoryMock.findOne(organisation3.getId())).thenReturn(organisation3); when(projectUserMapperMock.mapToResource(projectUser1WithFinanceRole)).thenReturn(projectUser1WithFinanceRoleResource); when(projectUserMapperMock.mapToResource(projectUser1WithPartnerRole)).thenReturn(projectUser1WithPartnerRoleResource); when(projectUserMapperMock.mapToResource(projectUser2WithPartnerRole)).thenReturn(projectUser2WithPartnerRoleResource); when(projectUserMapperMock.mapToResource(projectUser2WithFinanceRole)).thenReturn(projectUser2WithFinanceRoleResource); when(projectUserMapperMock.mapToResource(projectUserWithPartnerRole)).thenReturn(projectUserWithPartnerRoleResource); ServiceResult<Void> result = service.saveProjectSubmitDateTime(1L, LocalDateTime.now()); assertTrue(result.isFailure()); assertTrue(result.getFailure().is(PROJECT_SETUP_PROJECT_DETAILS_CANNOT_BE_SUBMITTED_IF_INCOMPLETE)); } @Test public void testSaveMOWithDiffProjectIdInURLAndMOResource() { Long projectid = 1L; MonitoringOfficerResource monitoringOfficerResource = newMonitoringOfficerResource() .withProject(3L) .withFirstName("abc") .withLastName("xyz") .withEmail("[email protected]") .withPhoneNumber("078323455") .build(); ServiceResult<Void> result = service.saveMonitoringOfficer(projectid, monitoringOfficerResource); assertTrue(result.getFailure().is(PROJECT_SETUP_PROJECT_ID_IN_URL_MUST_MATCH_PROJECT_ID_IN_MONITORING_OFFICER_RESOURCE)); } @Test public void testSaveMOWhenProjectDetailsNotYetSubmitted() { Long projectid = 1L; Project projectInDB = newProject().withId(1L).build(); when(projectRepositoryMock.findOne(projectid)).thenReturn(projectInDB); ServiceResult<Void> result = service.saveMonitoringOfficer(projectid, monitoringOfficerResource); assertTrue(result.getFailure().is(PROJECT_SETUP_MONITORING_OFFICER_CANNOT_BE_ASSIGNED_UNTIL_PROJECT_DETAILS_SUBMITTED)); } @Test public void testSaveMOWhenMOExistsForAProject() { Long 
projectid = 1L; // Set this to different values, so that we can assert that it gets updated MonitoringOfficer monitoringOfficerInDB = MonitoringOfficerBuilder.newMonitoringOfficer() .withFirstName("def") .withLastName("klm") .withEmail("[email protected]") .withPhoneNumber("079237439") .build(); Project projectInDB = newProject().withId(1L).withSubmittedDate(LocalDateTime.now()).build(); when(projectRepositoryMock.findOne(projectid)).thenReturn(projectInDB); when(monitoringOfficerRepository.findOneByProjectId(monitoringOfficerResource.getProject())).thenReturn(monitoringOfficerInDB); ServiceResult<Void> result = service.saveMonitoringOfficer(projectid, monitoringOfficerResource); // Assert that the MO in DB is updated with the correct values from MO Resource Assert.assertEquals("First name of MO in DB should be updated with the value from MO Resource", monitoringOfficerInDB.getFirstName(), monitoringOfficerResource.getFirstName()); Assert.assertEquals("Last name of MO in DB should be updated with the value from MO Resource", monitoringOfficerInDB.getLastName(), monitoringOfficerResource.getLastName()); Assert.assertEquals("Email of MO in DB should be updated with the value from MO Resource", monitoringOfficerInDB.getEmail(), monitoringOfficerResource.getEmail()); Assert.assertEquals("Phone number of MO in DB should be updated with the value from MO Resource", monitoringOfficerInDB.getPhoneNumber(), monitoringOfficerResource.getPhoneNumber()); assertTrue(result.isSuccess()); } @Test public void testSaveMOWhenMODoesNotExistForAProject() { Long projectid = 1L; Project projectInDB = newProject().withId(1L).withSubmittedDate(LocalDateTime.now()).build(); when(projectRepositoryMock.findOne(projectid)).thenReturn(projectInDB); when(monitoringOfficerRepository.findOneByProjectId(monitoringOfficerResource.getProject())).thenReturn(null); ServiceResult<Void> result = service.saveMonitoringOfficer(projectid, monitoringOfficerResource); assertTrue(result.isSuccess()); } @Test public void testGetMonitoringOfficerWhenMODoesNotExistInDB() { Long projectid = 1L; ServiceResult<MonitoringOfficerResource> result = service.getMonitoringOfficer(projectid); String errorKey = result.getFailure().getErrors().get(0).getErrorKey(); Assert.assertEquals(CommonFailureKeys.GENERAL_NOT_FOUND.name(), errorKey); } @Test public void testGetMonitoringOfficerWhenMOExistsInDB() { Long projectid = 1L; MonitoringOfficer monitoringOfficerInDB = MonitoringOfficerBuilder.newMonitoringOfficer() .withFirstName("def") .withLastName("klm") .withEmail("[email protected]") .withPhoneNumber("079237439") .build(); when(monitoringOfficerRepository.findOneByProjectId(projectid)).thenReturn(monitoringOfficerInDB); ServiceResult<MonitoringOfficerResource> result = service.getMonitoringOfficer(projectid); assertTrue(result.isSuccess()); } @Test public void testCreateCollaborationAgreementFileEntry() { assertCreateFile( project::getCollaborationAgreement, (fileToCreate, inputStreamSupplier) -> service.createCollaborationAgreementFileEntry(123L, fileToCreate, inputStreamSupplier)); } @Test public void testUpdateCollaborationAgreementFileEntry() { assertUpdateFile( project::getCollaborationAgreement, (fileToUpdate, inputStreamSupplier) -> service.updateCollaborationAgreementFileEntry(123L, fileToUpdate, inputStreamSupplier)); } @Test public void testGetCollaborationAgreementFileEntryDetails() { assertGetFileDetails( project::setCollaborationAgreement, () -> service.getCollaborationAgreementFileEntryDetails(123L)); } @Test public void 
testGetCollaborationAgreementFileContents() { assertGetFileContents( project::setCollaborationAgreement, () -> service.getCollaborationAgreementFileContents(123L)); } @Test public void testDeleteCollaborationAgreementFile() { assertDeleteFile( project::getCollaborationAgreement, project::setCollaborationAgreement, () -> service.deleteCollaborationAgreementFile(123L)); } @Test public void testCreateExploitationPlanFileEntry() { assertCreateFile( project::getExploitationPlan, (fileToCreate, inputStreamSupplier) -> service.createExploitationPlanFileEntry(123L, fileToCreate, inputStreamSupplier)); } @Test public void testUpdateExploitationPlanFileEntry() { assertUpdateFile( project::getExploitationPlan, (fileToUpdate, inputStreamSupplier) -> service.updateExploitationPlanFileEntry(123L, fileToUpdate, inputStreamSupplier)); } @Test public void testGetExploitationPlanFileEntryDetails() { assertGetFileDetails( project::setExploitationPlan, () -> service.getExploitationPlanFileEntryDetails(123L)); } @Test public void testGetExploitationPlanFileContents() { assertGetFileContents( project::setExploitationPlan, () -> service.getExploitationPlanFileContents(123L)); } @Test public void testDeleteExploitationPlanFile() { assertDeleteFile( project::getExploitationPlan, project::setExploitationPlan, () -> service.deleteExploitationPlanFile(123L)); } @Test public void testRetrieveUploadedFilesExist() { assertUploadedFilesExist( project::setCollaborationAgreement, project::setExploitationPlan, () -> service.retrieveUploadedDocuments(123L)); } @Test public void testFilesCanBeSubmitted() { assertFilesCanBeSubmittedByProjectManagerAndFilesExist( project::setCollaborationAgreement, project::setExploitationPlan, () -> service.isOtherDocumentsSubmitAllowed(123L)); } @Test public void testFilesCannotBeSubmittedIfUserNotProjectManager() { assertFilesCannotBeSubmittedIfNotByProjectManager( project::setCollaborationAgreement, project::setExploitationPlan, () -> service.isOtherDocumentsSubmitAllowed(123L)); } private void assertFilesCannotBeSubmittedIfNotByProjectManager(Consumer<FileEntry> fileSetter1, Consumer<FileEntry> fileSetter2, Supplier<ServiceResult<Boolean>> getConditionFn) { List<ProjectUser> projectUsers = new ArrayList<>(); Arrays.stream(UserRoleType.values()) .filter(roleType -> !roleType.getName().equals(UserRoleType.PROJECT_MANAGER.getName())) .forEach(roleType -> { ProjectUser projectUser = newProjectUser() .withId(3L) .withRole(roleType) .build(); projectUsers.add(projectUser); }); when(projectUserRepositoryMock.findByProjectId(123L)).thenReturn(projectUsers); Supplier<InputStream> inputStreamSupplier1 = () -> null; Supplier<InputStream> inputStreamSupplier2 = () -> null; getFileEntryResources(fileSetter1, fileSetter2, inputStreamSupplier1, inputStreamSupplier2); ServiceResult<Boolean> result = getConditionFn.get(); assertFalse(result.isSuccess()); assertTrue(result.isFailure()); } private void assertFilesCanBeSubmittedByProjectManagerAndFilesExist(Consumer<FileEntry> fileSetter1, Consumer<FileEntry> fileSetter2, Supplier<ServiceResult<Boolean>> getConditionFn) { ProjectUser projectUserToSet = newProjectUser() .withId(1L) .withRole(projectManagerRole) .build(); List<ProjectUser> projectUsers = new ArrayList<>(); projectUsers.add(projectUserToSet); when(projectUserRepositoryMock.findByProjectId(123L)).thenReturn(projectUsers); Supplier<InputStream> inputStreamSupplier1 = () -> null; Supplier<InputStream> inputStreamSupplier2 = () -> null; getFileEntryResources(fileSetter1, fileSetter2, 
inputStreamSupplier1, inputStreamSupplier2); ServiceResult<Boolean> result = getConditionFn.get(); assertTrue(result.isSuccess()); assertTrue(result.getSuccessObject()); } private void assertUploadedFilesExist(Consumer<FileEntry> fileSetter1, Consumer<FileEntry> fileSetter2, Supplier<List<ServiceResult<FileAndContents>>> getFileContentsFnForFiles) { Supplier<InputStream> inputStreamSupplier1 = () -> null; Supplier<InputStream> inputStreamSupplier2 = () -> null; List<FileEntryResource> fileEntryResourcesToGet = getFileEntryResources(fileSetter1, fileSetter2, inputStreamSupplier1, inputStreamSupplier2); List<ServiceResult<FileAndContents>> results = getFileContentsFnForFiles.get(); assertTrue(results.get(0).isSuccess()); assertTrue(results.get(1).isSuccess()); assertEquals(fileEntryResourcesToGet.get(0), results.get(0).getSuccessObject().getFileEntry()); assertEquals(fileEntryResourcesToGet.get(1), results.get(1).getSuccessObject().getFileEntry()); assertEquals(inputStreamSupplier1, results.get(0).getSuccessObject().getContentsSupplier()); assertEquals(inputStreamSupplier2, results.get(1).getSuccessObject().getContentsSupplier()); } private List<FileEntryResource> getFileEntryResources(Consumer<FileEntry> fileSetter1, Consumer<FileEntry> fileSetter2, Supplier<InputStream> inputStreamSupplier1, Supplier<InputStream> inputStreamSupplier2) { FileEntry fileEntry1ToGet = newFileEntry().build(); FileEntry fileEntry2ToGet = newFileEntry().build(); List<FileEntryResource> fileEntryResourcesToGet = newFileEntryResource().withFilesizeBytes(100).build(2); fileSetter1.accept(fileEntry1ToGet); fileSetter2.accept(fileEntry2ToGet); when(fileServiceMock.getFileByFileEntryId(fileEntry1ToGet.getId())).thenReturn(serviceSuccess(inputStreamSupplier1)); when(fileServiceMock.getFileByFileEntryId(fileEntry2ToGet.getId())).thenReturn(serviceSuccess(inputStreamSupplier2)); when(fileEntryMapperMock.mapToResource(fileEntry1ToGet)).thenReturn(fileEntryResourcesToGet.get(0)); when(fileEntryMapperMock.mapToResource(fileEntry2ToGet)).thenReturn(fileEntryResourcesToGet.get(1)); return fileEntryResourcesToGet; } private void assertGetFileContents(Consumer<FileEntry> fileSetter, Supplier<ServiceResult<FileAndContents>> getFileContentsFn) { FileEntry fileToGet = newFileEntry().build(); Supplier<InputStream> inputStreamSupplier = () -> null; FileEntryResource fileResourceToGet = newFileEntryResource().build(); fileSetter.accept(fileToGet); when(fileServiceMock.getFileByFileEntryId(fileToGet.getId())).thenReturn(serviceSuccess(inputStreamSupplier)); when(fileEntryMapperMock.mapToResource(fileToGet)).thenReturn(fileResourceToGet); ServiceResult<FileAndContents> result = getFileContentsFn.get(); assertTrue(result.isSuccess()); assertEquals(fileResourceToGet, result.getSuccessObject().getFileEntry()); assertEquals(inputStreamSupplier, result.getSuccessObject().getContentsSupplier()); } private void assertCreateFile(Supplier<FileEntry> fileGetter, BiFunction<FileEntryResource, Supplier<InputStream>, ServiceResult<FileEntryResource>> createFileFn) { FileEntryResource fileToCreate = newFileEntryResource().build(); Supplier<InputStream> inputStreamSupplier = () -> null; FileEntry createdFile = newFileEntry().build(); FileEntryResource createdFileResource = newFileEntryResource().build(); when(fileServiceMock.createFile(fileToCreate, inputStreamSupplier)).thenReturn(serviceSuccess(Pair.of(new File("blah"), createdFile))); when(fileEntryMapperMock.mapToResource(createdFile)).thenReturn(createdFileResource); 
ServiceResult<FileEntryResource> result = createFileFn.apply(fileToCreate, inputStreamSupplier); assertTrue(result.isSuccess()); assertEquals(createdFileResource, result.getSuccessObject()); assertEquals(createdFile, fileGetter.get()); } private void assertGetFileDetails(Consumer<FileEntry> fileSetter, Supplier<ServiceResult<FileEntryResource>> getFileDetailsFn) { FileEntry fileToGet = newFileEntry().build(); FileEntryResource fileResourceToGet = newFileEntryResource().build(); fileSetter.accept(fileToGet); when(fileEntryMapperMock.mapToResource(fileToGet)).thenReturn(fileResourceToGet); ServiceResult<FileEntryResource> result = getFileDetailsFn.get(); assertTrue(result.isSuccess()); assertEquals(fileResourceToGet, result.getSuccessObject()); } private void assertDeleteFile(Supplier<FileEntry> fileGetter, Consumer<FileEntry> fileSetter, Supplier<ServiceResult<Void>> deleteFileFn) { FileEntry fileToDelete = newFileEntry().build(); fileSetter.accept(fileToDelete); when(fileServiceMock.deleteFile(fileToDelete.getId())).thenReturn(serviceSuccess(fileToDelete)); ServiceResult<Void> result = deleteFileFn.get(); assertTrue(result.isSuccess()); assertNull(fileGetter.get()); verify(fileServiceMock).deleteFile(fileToDelete.getId()); } private void assertUpdateFile(Supplier<FileEntry> fileGetter, BiFunction<FileEntryResource, Supplier<InputStream>, ServiceResult<Void>> updateFileFn) { FileEntryResource fileToUpdate = newFileEntryResource().build(); Supplier<InputStream> inputStreamSupplier = () -> null; FileEntry updatedFile = newFileEntry().build(); FileEntryResource updatedFileResource = newFileEntryResource().build(); when(fileServiceMock.updateFile(fileToUpdate, inputStreamSupplier)).thenReturn(serviceSuccess(Pair.of(new File("blah"), updatedFile))); when(fileEntryMapperMock.mapToResource(updatedFile)).thenReturn(updatedFileResource); ServiceResult<Void> result = updateFileFn.apply(fileToUpdate, inputStreamSupplier); assertTrue(result.isSuccess()); assertEquals(updatedFile, fileGetter.get()); verify(fileServiceMock).updateFile(fileToUpdate, inputStreamSupplier); } private Project createProjectExpectationsFromOriginalApplication(Application application) { assertFalse(application.getProcessRoles().isEmpty()); return createLambdaMatcher(project -> { assertEquals(application.getName(), project.getName()); assertEquals(application.getDurationInMonths(), project.getDurationInMonths()); assertEquals(application.getStartDate(), project.getTargetStartDate()); assertFalse(project.getProjectUsers().isEmpty()); assertNull(project.getAddress()); List<ProcessRole> collaborativeRoles = simpleFilter(application.getProcessRoles(), ProcessRole::isLeadApplicantOrCollaborator); assertEquals(collaborativeRoles.size(), project.getProjectUsers().size()); collaborativeRoles.forEach(processRole -> { List<ProjectUser> matchingProjectUser = simpleFilter(project.getProjectUsers(), projectUser -> projectUser.getOrganisation().equals(processRole.getOrganisation()) && projectUser.getUser().equals(processRole.getUser())); assertEquals(1, matchingProjectUser.size()); assertEquals(PARTNER.getName(), matchingProjectUser.get(0).getRole().getName()); assertEquals(project, matchingProjectUser.get(0).getProject()); }); }); } @Override protected ProjectService supplyServiceUnderTest() { return new ProjectServiceImpl(); } }
INFUND-3530 more tests
ifs-data-service/src/test/java/com/worth/ifs/project/transactional/ProjectServiceImplTest.java
INFUND-3530 more tests
<ide><path>fs-data-service/src/test/java/com/worth/ifs/project/transactional/ProjectServiceImplTest.java
<ide> import static com.worth.ifs.address.builder.AddressTypeBuilder.newAddressType;
<ide> import static com.worth.ifs.address.resource.OrganisationAddressType.*;
<ide> import static com.worth.ifs.application.builder.ApplicationBuilder.newApplication;
<add>import static com.worth.ifs.commons.error.CommonErrors.badRequestError;
<ide> import static com.worth.ifs.commons.error.CommonErrors.notFoundError;
<ide> import static com.worth.ifs.commons.error.CommonFailureKeys.*;
<ide> import static com.worth.ifs.commons.service.ServiceResult.serviceSuccess;
<ide>     }
<ide>
<ide>
<add>
<add>    @Test
<add>    public void testAddPartnerOrganisationNotOnProject(){
<add>        Organisation o = newOrganisation().build();
<add>        Organisation organisationNotOnProject = newOrganisation().build();
<add>        User u = newUser().build();
<add>        List<ProjectUser> pu = newProjectUser().withRole(PARTNER).withUser(u).withOrganisation(o).build(1);
<add>        Project p = newProject().withProjectUsers(pu).build();
<add>        when(projectRepositoryMock.findOne(p.getId())).thenReturn(p);
<add>        when(organisationRepositoryMock.findOne(o.getId())).thenReturn(o);
<add>        when(organisationRepositoryMock.findOne(organisationNotOnProject.getId())).thenReturn(organisationNotOnProject);
<add>        when(userRepositoryMock.findOne(u.getId())).thenReturn(u);
<add>        // Method under test
<add>        ServiceResult<Void> shouldFail = service.addPartner(p.getId(), u.getId(), organisationNotOnProject.getId());
<add>        // Expectations
<add>        assertTrue(shouldFail.isFailure());
<add>        assertTrue(shouldFail.getFailure().is(badRequestError("project does not contain organisation")));
<add>    }
<add>
<add>    @Test
<add>    public void testAddPartnerPartnerAlreadyExists(){
<add>        Organisation o = newOrganisation().build();
<add>        User u = newUser().build();
<add>        List<ProjectUser> pu = newProjectUser().withRole(PARTNER).withUser(u).withOrganisation(o).build(1);
<add>        Project p = newProject().withProjectUsers(pu).build();
<add>        when(projectRepositoryMock.findOne(p.getId())).thenReturn(p);
<add>        when(organisationRepositoryMock.findOne(o.getId())).thenReturn(o);
<add>        when(userRepositoryMock.findOne(u.getId())).thenReturn(u);
<add>        // Method under test
<add>        ServiceResult<Void> shouldFail = service.addPartner(p.getId(), u.getId(), o.getId());
<add>        // Expectations
<add>        verifyZeroInteractions(projectUserRepositoryMock);
<add>        assertTrue(shouldFail.isSuccess());
<add>    }
<add>
<add>    @Test
<add>    public void testAddPartner(){
<add>        Organisation o = newOrganisation().build();
<add>        User u = newUser().build();
<add>        List<ProjectUser> pu = newProjectUser().withRole(PARTNER).withUser(u).withOrganisation(o).build(1);
<add>        Project p = newProject().withProjectUsers(pu).build();
<add>        User newUser = newUser().build();
<add>        when(projectRepositoryMock.findOne(p.getId())).thenReturn(p);
<add>        when(organisationRepositoryMock.findOne(o.getId())).thenReturn(o);
<add>        when(userRepositoryMock.findOne(u.getId())).thenReturn(u);
<add>        when(userRepositoryMock.findOne(newUser.getId())).thenReturn(u);
<add>        // Method under test
<add>        ServiceResult<Void> shouldFail = service.addPartner(p.getId(), newUser.getId(), o.getId());
<add>        // Expectations
<add>        verify(projectUserRepositoryMock).save(isA(ProjectUser.class));
<add>        assertTrue(shouldFail.isSuccess());
<add>    }
<add>
<add>
<ide>     private void assertFilesCannotBeSubmittedIfNotByProjectManager(Consumer<FileEntry> fileSetter1,
<ide>                                                                    Consumer<FileEntry> fileSetter2,
<ide>                                                                    Supplier<ServiceResult<Boolean>> getConditionFn) {
Java
apache-2.0
d56c093c2a0f42ba29485a32531d111c5f194878
0
luxmeter/webserver-demo,luxmeter/webserver-demo
package luxmeter.filter;

import luxmeter.model.HttpExchangeMock;
import org.junit.Test;

import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.URI;
import java.nio.file.Paths;

import static org.hamcrest.Matchers.endsWith;
import static org.hamcrest.Matchers.equalTo;
import static org.junit.Assert.assertThat;

public class RequestValidationFilterTest {
    private RequestValidationFilter testUnit = new RequestValidationFilter(Paths.get(System.getProperty("user.dir")));

    @Test
    public void shouldReturnNotFound() throws IOException {
        HttpExchangeMock httpExchange = new HttpExchangeMock(
                URI.create("http://localhost:8080/res_does_not_exist"), "GET");
        testUnit.doFilter(httpExchange, null);
        assertThat(httpExchange.getResponseCode(), equalTo(HttpURLConnection.HTTP_NOT_FOUND));
        assertThat(httpExchange.responseBodyToString(),
                endsWith(RequestValidationFilter.ERROR_MSG_RESOURCE_NOT_FOUND));
    }

    @Test
    public void shouldReturnBadRequest() throws IOException {
        HttpExchangeMock httpExchange = new HttpExchangeMock(
                URI.create("http://localhost:8080/some_file.md"), "NOT_SUPPORTED_REQUEST");
        testUnit.doFilter(httpExchange, null);
        assertThat(httpExchange.getResponseCode(), equalTo(HttpURLConnection.HTTP_BAD_METHOD));
        assertThat(httpExchange.responseBodyToString(),
                endsWith(RequestValidationFilter.ERROR_MSG_NOT_SUPPORTED_REQUEST));
    }
}
src/test/java/luxmeter/filter/RequestValidationFilterTest.java
package luxmeter.filter;

import luxmeter.model.HttpExchangeMock;
import org.junit.Test;

import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.URI;
import java.nio.file.Paths;

import static org.hamcrest.Matchers.equalTo;
import static org.junit.Assert.assertThat;

public class RequestValidationFilterTest {
    private RequestValidationFilter testUnit = new RequestValidationFilter(Paths.get(System.getProperty("user.dir")));

    @Test
    public void shouldReturnNotFound() throws IOException {
        HttpExchangeMock httpExchange = new HttpExchangeMock(
                URI.create("http://localhost:8080/res_does_not_exist"), "GET");
        testUnit.doFilter(httpExchange, null);
        assertThat(httpExchange.getResponseCode(), equalTo(HttpURLConnection.HTTP_NOT_FOUND));
        assertThat(httpExchange.responseBodyToString(),
                equalTo(RequestValidationFilter.ERROR_MSG_RESOURCE_NOT_FOUND));
    }

    @Test
    public void shouldReturnBadRequest() throws IOException {
        HttpExchangeMock httpExchange = new HttpExchangeMock(
                URI.create("http://localhost:8080/some_file.md"), "NOT_SUPPORTED_REQUEST");
        testUnit.doFilter(httpExchange, null);
        assertThat(httpExchange.getResponseCode(), equalTo(HttpURLConnection.HTTP_BAD_METHOD));
        assertThat(httpExchange.responseBodyToString(),
                equalTo(RequestValidationFilter.ERROR_MSG_NOT_SUPPORTED_REQUEST));
    }
}
fixed test
src/test/java/luxmeter/filter/RequestValidationFilterTest.java
fixed test
<ide><path>rc/test/java/luxmeter/filter/RequestValidationFilterTest.java
<ide> import java.net.URI;
<ide> import java.nio.file.Paths;
<ide>
<add>import static org.hamcrest.Matchers.endsWith;
<ide> import static org.hamcrest.Matchers.equalTo;
<ide> import static org.junit.Assert.assertThat;
<ide>
<ide>         testUnit.doFilter(httpExchange, null);
<ide>         assertThat(httpExchange.getResponseCode(), equalTo(HttpURLConnection.HTTP_NOT_FOUND));
<ide>         assertThat(httpExchange.responseBodyToString(),
<del>                equalTo(RequestValidationFilter.ERROR_MSG_RESOURCE_NOT_FOUND));
<add>                endsWith(RequestValidationFilter.ERROR_MSG_RESOURCE_NOT_FOUND));
<ide>     }
<ide>
<ide>     @Test
<ide>         testUnit.doFilter(httpExchange, null);
<ide>         assertThat(httpExchange.getResponseCode(), equalTo(HttpURLConnection.HTTP_BAD_METHOD));
<ide>         assertThat(httpExchange.responseBodyToString(),
<del>                equalTo(RequestValidationFilter.ERROR_MSG_NOT_SUPPORTED_REQUEST));
<add>                endsWith(RequestValidationFilter.ERROR_MSG_NOT_SUPPORTED_REQUEST));
<ide>     }
<ide> }
Java
apache-2.0
793e3e07715cadbaebda3c711318385e97dff4d2
0
CMPUT301W16T07/TeamName
package com.teamname.tutortrader; import android.app.Activity; import android.app.Instrumentation; import android.graphics.Bitmap; import android.test.ActivityInstrumentationTestCase2; import android.test.UiThreadTest; import android.util.Log; import android.widget.EditText; import android.widget.TextView; import com.robotium.solo.Solo; import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.Map; import io.searchbox.client.JestResult; import io.searchbox.core.Delete; import io.searchbox.core.Search; import io.searchbox.core.SearchResult; /** * This will test the addSessionActivity by adding a new activity and see that it has been * created * * @see AddSessionActivity for more details about what it does */ public class AvailableSessionsActivityTest extends ActivityInstrumentationTestCase2 { Instrumentation instrumentation; Activity activity; EditText titleInput; EditText descriptionInput; EditText searchInput; Solo solo; public void setUp() throws Exception { super.setUp(); //setUp() is run before a test case is started. //This is where the solo object is create d. solo = new Solo(getInstrumentation()); getActivity(); } @Override public void tearDown() throws Exception { //tearDown() is run after a test case has finished. //finishOpenedActivities() will finish all the activities that have been opened during the test execution. solo.finishOpenedActivities(); super.tearDown(); } public AvailableSessionsActivityTest() { super(AvailableSessionsActivity.class); } public void testTest(){ solo.clickOnMenuItem("Profile"); solo.sleep(2000); solo.clickOnMenuItem("Available"); } public void testViewAvailable() { Profile profile = new Profile("Test tutor", "[email protected]", "780-666-6666"); Bitmap.Config conf = Bitmap.Config.ARGB_8888; Bitmap bm1 = Bitmap.createBitmap(1, 2, conf); Session session = new Session("Math", "Tutor for linear Algebra for all university levels", profile.getProfileID(), bm1); ElasticSearchController.AddProfileTask profileTask = new ElasticSearchController.AddProfileTask(); profileTask.execute(profile); ElasticSearchController.AddSessionTask addSessionTask = new ElasticSearchController.AddSessionTask(); addSessionTask.execute(session); solo.clickOnMenuItem("Available"); assertTrue(solo.searchText("Math")); assertEquals("this is the title we expected", session.getTitle(), "Math"); assertEquals("this is the description we expected", session.getDescription(), "Tutor for linear Algebra for all university levels"); /** * Testing UseCase 04.01.01 and 04.02.01 - Searching * "As a borrower, I want to specify a set of keywords, and search for all things not currently * borrowed whose description contains all keywords." and "As a borrower, I want search results * to show each thing not currently borrowed with its description, owner username, and status." * <p/> * We will type things into the search bar. Then we will click search. This will then * bring up a new screen with the search results. */ solo.clickOnMenuItem("Available"); solo.assertCurrentActivity("right activity", AvailableSessionsActivity.class); solo.typeText(0, "Math"); solo.clickOnButton("Search"); assertTrue(solo.searchText("Math", 2)); assertTrue(solo.searchText("Tutor for linear Algebra for all university levels")); /** * Testing UseCase 01.04.01 - ViewOneSession * "As an owner, I want to view one of my things, its description and status." * <p/> * We will perform a click on list entry to bring us to the ViewOneSession view. 
* From here we test to see that all the buttons are present, and all the TextViews are * accurate */ solo.assertCurrentActivity("right activity", AvailableSessionsActivity.class); solo.clickOnText("Math"); assertTrue(solo.searchText("Math")); assertTrue(solo.searchText("Tutor for linear Algebra for all university levels")); assertTrue(solo.searchText("Test tutor")); assertTrue(solo.searchText("780-666-6666")); assertTrue(solo.searchText("[email protected]")); /** * Testing Use Case 01.05.01 - DeleteSession * "As an owner, I want to delete a thing in my things." * * To test this we create a session, then change the activities to ge to the EditSession * view. The first test case tests if the delete occured when the user confirmed the delete. * * The second test tests the case when the user does not confirm the delete. */ solo.clickOnMenuItem("Available"); solo.assertCurrentActivity("right activity", AvailableSessionsActivity.class); Profile owner = MethodsController.getProfile(session.getTutorID()); ElasticSearchController.RemoveProfileTask removeProfileTask = new ElasticSearchController.RemoveProfileTask(); removeProfileTask.execute(owner.getProfileID()); ElasticSearchController.RemoveSessionTask removeSessionTask = new ElasticSearchController.RemoveSessionTask(); removeSessionTask.execute(session.getSessionID()); assertFalse(solo.searchText("Math")); } }
TutorTrader/app/src/androidTest/java/com/teamname/tutortrader/AvailableSessionsActivityTest.java
package com.teamname.tutortrader; import android.app.Activity; import android.app.Instrumentation; import android.graphics.Bitmap; import android.test.ActivityInstrumentationTestCase2; import android.test.UiThreadTest; import android.util.Log; import android.widget.EditText; import android.widget.TextView; import com.robotium.solo.Solo; import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.Map; import io.searchbox.client.JestResult; import io.searchbox.core.Delete; import io.searchbox.core.Search; import io.searchbox.core.SearchResult; /** * This will test the addSessionActivity by adding a new activity and see that it has been * created * * @see AddSessionActivity for more details about what it does */ public class AvailableSessionsActivityTest extends ActivityInstrumentationTestCase2 { Instrumentation instrumentation; Activity activity; EditText titleInput; EditText descriptionInput; EditText searchInput; Solo solo; public void setUp() throws Exception { super.setUp(); //setUp() is run before a test case is started. //This is where the solo object is create d. solo = new Solo(getInstrumentation()); getActivity(); } @Override public void tearDown() throws Exception { //tearDown() is run after a test case has finished. //finishOpenedActivities() will finish all the activities that have been opened during the test execution. solo.finishOpenedActivities(); super.tearDown(); } public AvailableSessionsActivityTest() { super(AvailableSessionsActivity.class); } /** * Testing "Things" Use Cases */ /** USECASE 1 - AddSession * createSession(title, description) fills in the input text field and * clicks the 'save' button for the activity under test: */ public void testViewAvailable() { Profile profile = new Profile("Test tutor", "[email protected]", "780-666-6666"); Bitmap.Config conf = Bitmap.Config.ARGB_8888; Bitmap bm1 = Bitmap.createBitmap(1, 2, conf); Session session = new Session("Math", "Tutor for linear Algebra for all university levels", profile.getProfileID(), bm1); ElasticSearchController.AddProfileTask profileTask = new ElasticSearchController.AddProfileTask(); profileTask.execute(profile); ElasticSearchController.AddSessionTask addSessionTask = new ElasticSearchController.AddSessionTask(); addSessionTask.execute(session); solo.clickOnMenuItem("Available"); assertTrue(solo.searchText("Math")); assertEquals("this is the title we expected", session.getTitle(), "Math"); assertEquals("this is the description we expected", session.getDescription(), "Tutor for linear Algebra for all university levels"); /** * Testing UseCase 04.01.01 and 04.02.01 - Searching * "As a borrower, I want to specify a set of keywords, and search for all things not currently * borrowed whose description contains all keywords." and "As a borrower, I want search results * to show each thing not currently borrowed with its description, owner username, and status." * <p/> * We will type things into the search bar. Then we will click search. This will then * bring up a new screen with the search results. */ solo.clickOnMenuItem("Available"); solo.assertCurrentActivity("right activity", AvailableSessionsActivity.class); solo.typeText(0, "Math"); solo.clickOnButton("Search"); assertTrue(solo.searchText("Math", 2)); assertTrue(solo.searchText("Tutor for linear Algebra for all university levels")); /** * Testing UseCase 01.04.01 - ViewOneSession * "As an owner, I want to view one of my things, its description and status." 
* <p/> * We will perform a click on list entry to bring us to the ViewOneSession view. * From here we test to see that all the buttons are present, and all the TextViews are * accurate */ solo.assertCurrentActivity("right activity", AvailableSessionsActivity.class); solo.clickOnText("Math"); assertTrue(solo.searchText("Math")); assertTrue(solo.searchText("Tutor for linear Algebra for all university levels")); assertTrue(solo.searchText("Test tutor")); assertTrue(solo.searchText("780-666-6666")); assertTrue(solo.searchText("[email protected]")); /** * Testing Use Case 01.05.01 - DeleteSession * "As an owner, I want to delete a thing in my things." * * To test this we create a session, then change the activities to ge to the EditSession * view. The first test case tests if the delete occured when the user confirmed the delete. * * The second test tests the case when the user does not confirm the delete. */ solo.clickOnMenuItem("Available"); solo.assertCurrentActivity("right activity", AvailableSessionsActivity.class); Profile owner = MethodsController.getProfile(session.getTutorID()); ElasticSearchController.RemoveProfileTask removeProfileTask = new ElasticSearchController.RemoveProfileTask(); removeProfileTask.execute(owner.getProfileID()); ElasticSearchController.RemoveSessionTask removeSessionTask = new ElasticSearchController.RemoveSessionTask(); removeSessionTask.execute(session.getSessionID()); assertFalse(solo.searchText("Math")); } }
getting alex's commit
TutorTrader/app/src/androidTest/java/com/teamname/tutortrader/AvailableSessionsActivityTest.java
getting alex's commit
<ide><path>utorTrader/app/src/androidTest/java/com/teamname/tutortrader/AvailableSessionsActivityTest.java
<ide>         super(AvailableSessionsActivity.class);
<ide>     }
<ide>
<del>    /**
<del>     * Testing "Things" Use Cases
<del>     */
<add>    public void testTest(){
<add>        solo.clickOnMenuItem("Profile");
<add>        solo.sleep(2000);
<add>        solo.clickOnMenuItem("Available");
<add>    }
<ide>
<del>
<del>    /** USECASE 1 - AddSession
<del>     * createSession(title, description) fills in the input text field and
<del>     * clicks the 'save' button for the activity under test:
<del>     */
<ide>     public void testViewAvailable() {
<ide>         Profile profile = new Profile("Test tutor", "[email protected]", "780-666-6666");
<ide>         Bitmap.Config conf = Bitmap.Config.ARGB_8888;
JavaScript
mit
a84d2ae2d1c53c2099c732957742705f9d41bc4e
0
bitpay/bitcore-wallet-service,matiu/bitcore-wallet-service,cmgustavo/bitcore-wallet-service,cmgustavo/bitcore-wallet-service,bitpay/bitcore,troggy/bitcore-wallet-service,troggy/bitcore-wallet-service,martindale/bitcore,bitpay/bitcore,bitpay/bitcore,bitjson/bitcore,bitjson/bitcore,martindale/bitcore,bitjson/bitcore,janko33bd/bitcore-wallet-service,matiu/bitcore-wallet-service,bitjson/bitcore,martindale/bitcore,janko33bd/bitcore-wallet-service,bitpay/bitcore,martindale/bitcore,bitpay/bitcore-wallet-service
'use strict'; var _ = require('lodash'); var async = require('async'); var inspect = require('util').inspect; var chai = require('chai'); var sinon = require('sinon'); var should = chai.should(); var log = require('npmlog'); log.debug = log.verbose; var fs = require('fs'); var tingodb = require('tingodb')({ memStore: true }); var Utils = require('../../lib/utils'); var WalletUtils = require('bitcore-wallet-utils'); var Bitcore = WalletUtils.Bitcore; var Storage = require('../../lib/storage'); var Model = require('../../lib/model'); var WalletService = require('../../lib/server'); var EmailService = require('../../lib/emailservice'); var TestData = require('../testdata'); var CLIENT_VERSION = 'bwc-0.1.1'; var helpers = {}; helpers.getAuthServer = function(copayerId, cb) { var verifyStub = sinon.stub(WalletService.prototype, '_verifySignature'); verifyStub.returns(true); WalletService.getInstanceWithAuth({ copayerId: copayerId, message: 'dummy', signature: 'dummy', clientVersion: 'bwc-0.1.0', }, function(err, server) { verifyStub.restore(); if (err || !server) throw new Error('Could not login as copayerId ' + copayerId); return cb(server); }); }; helpers._generateCopayersTestData = function(n) { console.log('var copayers = ['); _.each(_.range(n), function(c) { var xpriv = new Bitcore.HDPrivateKey(); var xpub = Bitcore.HDPublicKey(xpriv); var xpriv_45H = xpriv.derive(45, true); var xpub_45H = Bitcore.HDPublicKey(xpriv_45H); var id45 = WalletUtils.xPubToCopayerId(xpub_45H.toString()); var xpriv_44H_0H_0H = xpriv.derive(44, true).derive(0, true).derive(0, true); var xpub_44H_0H_0H = Bitcore.HDPublicKey(xpriv_44H_0H_0H); var id44 = WalletUtils.xPubToCopayerId(xpub_44H_0H_0H.toString()); var xpriv_1H = xpriv.derive(1, true); var xpub_1H = Bitcore.HDPublicKey(xpriv_1H); var priv = xpriv_1H.derive(0).privateKey; var pub = xpub_1H.derive(0).publicKey; console.log('{id44: ', "'" + id44 + "',"); console.log('id45: ', "'" + id45 + "',"); console.log('xPrivKey: ', "'" + xpriv.toString() + "',"); console.log('xPubKey: ', "'" + xpub.toString() + "',"); console.log('xPrivKey_45H: ', "'" + xpriv_45H.toString() + "',"); console.log('xPubKey_45H: ', "'" + xpub_45H.toString() + "',"); console.log('xPrivKey_44H_0H_0H: ', "'" + xpriv_44H_0H_0H.toString() + "',"); console.log('xPubKey_44H_0H_0H: ', "'" + xpub_44H_0H_0H.toString() + "',"); console.log('xPrivKey_1H: ', "'" + xpriv_1H.toString() + "',"); console.log('xPubKey_1H: ', "'" + xpub_1H.toString() + "',"); console.log('privKey_1H_0: ', "'" + priv.toString() + "',"); console.log('pubKey_1H_0: ', "'" + pub.toString() + "'},"); }); console.log('];'); }; helpers.getSignedCopayerOpts = function(opts) { var hash = WalletUtils.getCopayerHash(opts.name, opts.xPubKey, opts.requestPubKey); opts.copayerSignature = WalletUtils.signMessage(hash, TestData.keyPair.priv); return opts; }; helpers.createAndJoinWallet = function(m, n, opts, cb) { if (_.isFunction(opts)) { cb = opts; opts = {}; } opts = opts || {}; var server = new WalletService(); var copayerIds = []; var offset = opts.offset || 0; var walletOpts = { name: 'a wallet', m: m, n: n, pubKey: TestData.keyPair.pub, }; if (_.isBoolean(opts.supportBIP44AndP2PKH)) walletOpts.supportBIP44AndP2PKH = opts.supportBIP44AndP2PKH; server.createWallet(walletOpts, function(err, walletId) { if (err) return cb(err); async.each(_.range(n), function(i, cb) { var copayerData = TestData.copayers[i + offset]; var copayerOpts = helpers.getSignedCopayerOpts({ walletId: walletId, name: 'copayer ' + (i + 1), xPubKey: 
(_.isBoolean(opts.supportBIP44AndP2PKH) && !opts.supportBIP44AndP2PKH) ? copayerData.xPubKey_45H : copayerData.xPubKey_44H_0H_0H, requestPubKey: copayerData.pubKey_1H_0, customData: 'custom data ' + (i + 1), }); if (_.isBoolean(opts.supportBIP44AndP2PKH)) copayerOpts.supportBIP44AndP2PKH = opts.supportBIP44AndP2PKH; server.joinWallet(copayerOpts, function(err, result) { should.not.exist(err); copayerIds.push(result.copayerId); return cb(err); }); }, function(err) { if (err) return new Error('Could not generate wallet'); helpers.getAuthServer(copayerIds[0], function(s) { s.getWallet({}, function(err, w) { cb(s, w); }); }); }); }); }; helpers.randomTXID = function() { return Bitcore.crypto.Hash.sha256(new Buffer(Math.random() * 100000)).toString('hex');; }; helpers.toSatoshi = function(btc) { if (_.isArray(btc)) { return _.map(btc, helpers.toSatoshi); } else { return Utils.strip(btc * 1e8); } }; helpers.stubUtxos = function(server, wallet, amounts, cb) { async.mapSeries(_.range(0, amounts.length > 2 ? 2 : 1), function(i, next) { server.createAddress({}, next); }, function(err, addresses) { should.not.exist(err); addresses.should.not.be.empty; var utxos = _.map([].concat(amounts), function(amount, i) { var address = addresses[i % addresses.length]; var confirmations; if (_.isString(amount) && _.startsWith(amount, 'u')) { amount = parseFloat(amount.substring(1)); confirmations = 0; } else { confirmations = Math.floor(Math.random() * 100 + 1); } var scriptPubKey; switch (wallet.addressType) { case WalletUtils.SCRIPT_TYPES.P2SH: scriptPubKey = Bitcore.Script.buildMultisigOut(address.publicKeys, wallet.m).toScriptHashOut(); break; case WalletUtils.SCRIPT_TYPES.P2PKH: scriptPubKey = Bitcore.Script.buildPublicKeyHashOut(address.address); break; } should.exist(scriptPubKey); return { txid: helpers.randomTXID(), vout: Math.floor(Math.random() * 10 + 1), satoshis: helpers.toSatoshi(amount).toString(), scriptPubKey: scriptPubKey.toBuffer().toString('hex'), address: address.address, confirmations: confirmations, }; }); blockchainExplorer.getUnspentUtxos = function(addresses, cb) { var selected = _.filter(utxos, function(utxo) { return _.contains(addresses, utxo.address); }); return cb(null, selected); }; return cb(utxos); }); }; helpers.stubBroadcast = function(thirdPartyBroadcast) { blockchainExplorer.broadcast = sinon.stub().callsArgWith(1, null, '112233'); blockchainExplorer.getTransaction = sinon.stub().callsArgWith(1, null, null); }; helpers.stubHistory = function(txs) { blockchainExplorer.getTransactions = function(addresses, from, to, cb) { var MAX_BATCH_SIZE = 100; var nbTxs = txs.length; if (_.isUndefined(from) && _.isUndefined(to)) { from = 0; to = MAX_BATCH_SIZE; } if (!_.isUndefined(from) && _.isUndefined(to)) to = from + MAX_BATCH_SIZE; if (!_.isUndefined(from) && !_.isUndefined(to) && to - from > MAX_BATCH_SIZE) to = from + MAX_BATCH_SIZE; if (from < 0) from = 0; if (to < 0) to = 0; if (from > nbTxs) from = nbTxs; if (to > nbTxs) to = nbTxs; var page = txs.slice(from, to); return cb(null, page); }; }; helpers.stubFeeLevels = function(levels) { blockchainExplorer.estimateFee = function(nbBlocks, cb) { var result = _.zipObject(_.map(_.pick(levels, nbBlocks), function(fee, n) { return [+n, fee > 0 ? 
fee / 1e8 : fee]; })); return cb(null, result); }; }; helpers.stubAddressActivity = function(activeAddresses) { blockchainExplorer.getAddressActivity = function(address, cb) { return cb(null, _.contains(activeAddresses, address)); }; }; helpers.clientSign = WalletUtils.signTxp; helpers.createProposalOptsLegacy = function(toAddress, amount, message, signingKey, feePerKb) { var opts = { toAddress: toAddress, amount: helpers.toSatoshi(amount), message: message, proposalSignature: null, }; if (feePerKb) opts.feePerKb = feePerKb; var hash = WalletUtils.getProposalHash(toAddress, opts.amount, message); try { opts.proposalSignature = WalletUtils.signMessage(hash, signingKey); } catch (ex) {} return opts; }; helpers.createSimpleProposalOpts = function(toAddress, amount, signingKey, opts) { var outputs = [{ toAddress: toAddress, amount: amount, }]; return helpers.createProposalOpts(Model.TxProposal.Types.SIMPLE, outputs, signingKey, opts); }; helpers.createProposalOpts = function(type, outputs, signingKey, moreOpts) { _.each(outputs, function(output) { output.amount = helpers.toSatoshi(output.amount); }); var opts = { type: type, proposalSignature: null }; if (moreOpts) { moreOpts = _.chain(moreOpts) .pick(['feePerKb', 'customData', 'message']) .value(); opts = _.assign(opts, moreOpts); } opts = _.defaults(opts, { message: null }); var hash; if (type == Model.TxProposal.Types.SIMPLE) { opts.toAddress = outputs[0].toAddress; opts.amount = outputs[0].amount; hash = WalletUtils.getProposalHash(opts.toAddress, opts.amount, opts.message, opts.payProUrl); } else if (type == Model.TxProposal.Types.MULTIPLEOUTPUTS) { opts.outputs = outputs; var header = { outputs: outputs, message: opts.message, payProUrl: opts.payProUrl }; hash = WalletUtils.getProposalHash(header); } try { opts.proposalSignature = WalletUtils.signMessage(hash, signingKey); } catch (ex) {} return opts; }; helpers.createAddresses = function(server, wallet, main, change, cb) { async.map(_.range(main + change), function(i, next) { var address = wallet.createAddress(i >= main); server.storage.storeAddressAndWallet(wallet, address, function(err) { if (err) return next(err); next(null, address); }); }, function(err, addresses) { if (err) throw new Error('Could not generate addresses'); return cb(_.take(addresses, main), _.takeRight(addresses, change)); }); }; var storage, blockchainExplorer; var useMongoDb = !!process.env.USE_MONGO_DB; function initStorage(cb) { function getDb(cb) { if (useMongoDb) { var mongodb = require('mongodb'); mongodb.MongoClient.connect('mongodb://localhost:27017/bws_test', function(err, db) { if (err) throw err; return cb(db); }); } else { var db = new tingodb.Db('./db/test', {}); return cb(db); } } getDb(function(db) { storage = new Storage({ db: db }); return cb(); }); }; function resetStorage(cb) { if (!storage.db) return cb(); storage.db.dropDatabase(function(err) { return cb(); }); }; describe('Wallet service', function() { before(function(done) { initStorage(done); }); beforeEach(function(done) { resetStorage(function() { blockchainExplorer = sinon.stub(); WalletService.initialize({ storage: storage, blockchainExplorer: blockchainExplorer, }, done); }); }); after(function(done) { WalletService.shutDown(done); }); describe('Email notifications', function() { var server, wallet, mailerStub, emailService; describe('Shared wallet', function() { beforeEach(function(done) { helpers.createAndJoinWallet(2, 3, function(s, w) { server = s; wallet = w; var i = 0; async.eachSeries(w.copayers, function(copayer, next) { 
helpers.getAuthServer(copayer.id, function(server) { server.savePreferences({ email: 'copayer' + (++i) + '@domain.com', unit: 'bit', }, next); }); }, function(err) { should.not.exist(err); mailerStub = sinon.stub(); mailerStub.sendMail = sinon.stub(); mailerStub.sendMail.yields(); emailService = new EmailService(); emailService.start({ lockOpts: {}, messageBroker: server.messageBroker, storage: storage, mailer: mailerStub, emailOpts: { from: '[email protected]', subjectPrefix: '[test wallet]', publicTxUrlTemplate: { livenet: 'https://insight.bitpay.com/tx/{{txid}}', testnet: 'https://test-insight.bitpay.com/tx/{{txid}}', }, }, }, function(err) { should.not.exist(err); done(); }); }); }); }); it('should notify copayers a new tx proposal has been created', function(done) { var _readTemplateFile_old = emailService._readTemplateFile; emailService._readTemplateFile = function(language, filename, cb) { if (_.endsWith(filename, '.html')) { return cb(null, '<html><body>{{walletName}}</body></html>'); } else { _readTemplateFile_old.call(emailService, language, filename, cb); } }; helpers.stubUtxos(server, wallet, [1, 1], function() { var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 0.8, TestData.copayers[0].privKey_1H_0, { message: 'some message' }); server.createTx(txOpts, function(err, tx) { should.not.exist(err); setTimeout(function() { var calls = mailerStub.sendMail.getCalls(); calls.length.should.equal(2); var emails = _.map(calls, function(c) { return c.args[0]; }); _.difference(['[email protected]', '[email protected]'], _.pluck(emails, 'to')).should.be.empty; var one = emails[0]; one.from.should.equal('[email protected]'); one.subject.should.contain('New payment proposal'); one.text.should.contain(wallet.name); one.text.should.contain(wallet.copayers[0].name); should.exist(one.html); one.html.indexOf('<html>').should.equal(0); one.html.should.contain(wallet.name); server.storage.fetchUnsentEmails(function(err, unsent) { should.not.exist(err); unsent.should.be.empty; emailService._readTemplateFile = _readTemplateFile_old; done(); }); }, 100); }); }); }); it('should not send email if unable to apply template to notification', function(done) { var _applyTemplate_old = emailService._applyTemplate; emailService._applyTemplate = function(template, data, cb) { _applyTemplate_old.call(emailService, template, undefined, cb); }; helpers.stubUtxos(server, wallet, [1, 1], function() { var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 0.8, TestData.copayers[0].privKey_1H_0, { message: 'some message' }); server.createTx(txOpts, function(err, tx) { should.not.exist(err); setTimeout(function() { var calls = mailerStub.sendMail.getCalls(); calls.length.should.equal(0); server.storage.fetchUnsentEmails(function(err, unsent) { should.not.exist(err); unsent.should.be.empty; emailService._applyTemplate = _applyTemplate_old; done(); }); }, 100); }); }); }); it('should notify copayers a new outgoing tx has been created', function(done) { var _readTemplateFile_old = emailService._readTemplateFile; emailService._readTemplateFile = function(language, filename, cb) { if (_.endsWith(filename, '.html')) { return cb(null, '<html>{{&urlForTx}}<html>'); } else { _readTemplateFile_old.call(emailService, language, filename, cb); } }; helpers.stubUtxos(server, wallet, [1, 1], function() { var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 0.8, TestData.copayers[0].privKey_1H_0, { message: 'some message' }); var txp; 
async.waterfall([ function(next) { server.createTx(txOpts, next); }, function(t, next) { txp = t; async.eachSeries(_.range(2), function(i, next) { var copayer = TestData.copayers[i]; helpers.getAuthServer(copayer.id44, function(server) { var signatures = helpers.clientSign(txp, copayer.xPrivKey); server.signTx({ txProposalId: txp.id, signatures: signatures, }, function(err, t) { txp = t; next(); }); }); }, next); }, function(next) { helpers.stubBroadcast(); server.broadcastTx({ txProposalId: txp.id, }, next); }, ], function(err) { should.not.exist(err); setTimeout(function() { var calls = mailerStub.sendMail.getCalls(); var emails = _.map(_.takeRight(calls, 3), function(c) { return c.args[0]; }); _.difference(['[email protected]', '[email protected]', '[email protected]'], _.pluck(emails, 'to')).should.be.empty; var one = emails[0]; one.from.should.equal('[email protected]'); one.subject.should.contain('Payment sent'); one.text.should.contain(wallet.name); one.text.should.contain('800,000'); should.exist(one.html); one.html.should.contain('https://insight.bitpay.com/tx/' + txp.txid); server.storage.fetchUnsentEmails(function(err, unsent) { should.not.exist(err); unsent.should.be.empty; emailService._readTemplateFile = _readTemplateFile_old; done(); }); }, 100); }); }); }); it('should notify copayers a tx has been finally rejected', function(done) { helpers.stubUtxos(server, wallet, 1, function() { var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 0.8, TestData.copayers[0].privKey_1H_0, { message: 'some message' }); var txpId; async.waterfall([ function(next) { server.createTx(txOpts, next); }, function(txp, next) { txpId = txp.id; async.eachSeries(_.range(1, 3), function(i, next) { var copayer = TestData.copayers[i]; helpers.getAuthServer(copayer.id44, function(server) { server.rejectTx({ txProposalId: txp.id, }, next); }); }, next); }, ], function(err) { should.not.exist(err); setTimeout(function() { var calls = mailerStub.sendMail.getCalls(); var emails = _.map(_.takeRight(calls, 2), function(c) { return c.args[0]; }); _.difference(['[email protected]', '[email protected]'], _.pluck(emails, 'to')).should.be.empty; var one = emails[0]; one.from.should.equal('[email protected]'); one.subject.should.contain('Payment proposal rejected'); one.text.should.contain(wallet.name); one.text.should.contain('copayer 2, copayer 3'); one.text.should.not.contain('copayer 1'); server.storage.fetchUnsentEmails(function(err, unsent) { should.not.exist(err); unsent.should.be.empty; done(); }); }, 100); }); }); }); it('should notify copayers of incoming txs', function(done) { server.createAddress({}, function(err, address) { should.not.exist(err); // Simulate incoming tx notification server._notify('NewIncomingTx', { txid: '999', address: address, amount: 12300000, }, function(err) { setTimeout(function() { var calls = mailerStub.sendMail.getCalls(); calls.length.should.equal(3); var emails = _.map(calls, function(c) { return c.args[0]; }); _.difference(['[email protected]', '[email protected]', '[email protected]'], _.pluck(emails, 'to')).should.be.empty; var one = emails[0]; one.from.should.equal('[email protected]'); one.subject.should.contain('New payment received'); one.text.should.contain(wallet.name); one.text.should.contain('123,000'); server.storage.fetchUnsentEmails(function(err, unsent) { should.not.exist(err); unsent.should.be.empty; done(); }); }, 100); }); }); }); it('should notify each email address only once', function(done) { // Set same email address for 
copayer1 and copayer2 server.savePreferences({ email: '[email protected]', }, function(err) { server.createAddress({}, function(err, address) { should.not.exist(err); // Simulate incoming tx notification server._notify('NewIncomingTx', { txid: '999', address: address, amount: 12300000, }, function(err) { setTimeout(function() { var calls = mailerStub.sendMail.getCalls(); calls.length.should.equal(2); var emails = _.map(calls, function(c) { return c.args[0]; }); _.difference(['[email protected]', '[email protected]'], _.pluck(emails, 'to')).should.be.empty; var one = emails[0]; one.from.should.equal('[email protected]'); one.subject.should.contain('New payment received'); one.text.should.contain(wallet.name); one.text.should.contain('123,000'); server.storage.fetchUnsentEmails(function(err, unsent) { should.not.exist(err); unsent.should.be.empty; done(); }); }, 100); }); }); }); }); it('should build each email using preferences of the copayers', function(done) { // Set same email address for copayer1 and copayer2 server.savePreferences({ email: '[email protected]', language: 'es', unit: 'btc', }, function(err) { server.createAddress({}, function(err, address) { should.not.exist(err); // Simulate incoming tx notification server._notify('NewIncomingTx', { txid: '999', address: address, amount: 12300000, }, function(err) { setTimeout(function() { var calls = mailerStub.sendMail.getCalls(); calls.length.should.equal(3); var emails = _.map(calls, function(c) { return c.args[0]; }); var spanish = _.find(emails, { to: '[email protected]' }); spanish.from.should.equal('[email protected]'); spanish.subject.should.contain('Nuevo pago recibido'); spanish.text.should.contain(wallet.name); spanish.text.should.contain('0.123 BTC'); var english = _.find(emails, { to: '[email protected]' }); english.from.should.equal('[email protected]'); english.subject.should.contain('New payment received'); english.text.should.contain(wallet.name); english.text.should.contain('123,000 bits'); done(); }, 100); }); }); }); }); it('should support multiple emailservice instances running concurrently', function(done) { var emailService2 = new EmailService(); emailService2.start({ lock: emailService.lock, // Use same locker service messageBroker: server.messageBroker, storage: storage, mailer: mailerStub, emailOpts: { from: '[email protected]', subjectPrefix: '[test wallet 2]', }, }, function(err) { helpers.stubUtxos(server, wallet, 1, function() { var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 0.8, TestData.copayers[0].privKey_1H_0, { message: 'some message' }); server.createTx(txOpts, function(err, tx) { should.not.exist(err); setTimeout(function() { var calls = mailerStub.sendMail.getCalls(); calls.length.should.equal(2); server.storage.fetchUnsentEmails(function(err, unsent) { should.not.exist(err); unsent.should.be.empty; done(); }); }, 100); }); }); }); }); }); describe('1-of-N wallet', function() { beforeEach(function(done) { helpers.createAndJoinWallet(1, 2, function(s, w) { server = s; wallet = w; var i = 0; async.eachSeries(w.copayers, function(copayer, next) { helpers.getAuthServer(copayer.id, function(server) { server.savePreferences({ email: 'copayer' + (++i) + '@domain.com', unit: 'bit', }, next); }); }, function(err) { should.not.exist(err); mailerStub = sinon.stub(); mailerStub.sendMail = sinon.stub(); mailerStub.sendMail.yields(); emailService = new EmailService(); emailService.start({ lockOpts: {}, messageBroker: server.messageBroker, storage: storage, mailer: mailerStub, 
emailOpts: { from: '[email protected]', subjectPrefix: '[test wallet]', publicTxUrlTemplate: { livenet: 'https://insight.bitpay.com/tx/{{txid}}', testnet: 'https://test-insight.bitpay.com/tx/{{txid}}', }, }, }, function(err) { should.not.exist(err); done(); }); }); }); }); it('should NOT notify copayers a new tx proposal has been created', function(done) { helpers.stubUtxos(server, wallet, [1, 1], function() { var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 0.8, TestData.copayers[0].privKey_1H_0, { message: 'some message' }); server.createTx(txOpts, function(err, tx) { should.not.exist(err); setTimeout(function() { var calls = mailerStub.sendMail.getCalls(); calls.length.should.equal(0); done(); }, 100); }); }); }); }); }); describe('#getServiceVersion', function() { it('should get version from package', function() { WalletService.getServiceVersion().should.equal('bws-' + require('../../package').version); }); }); describe('#getInstance', function() { it('should get server instance', function() { var server = WalletService.getInstance({ clientVersion: 'bwc-0.0.1', }); server.clientVersion.should.equal('bwc-0.0.1'); }); }); describe('#getInstanceWithAuth', function() { it('should get server instance for existing copayer', function(done) { helpers.createAndJoinWallet(1, 2, function(s, wallet) { var xpriv = TestData.copayers[0].xPrivKey; var priv = TestData.copayers[0].privKey_1H_0; var sig = WalletUtils.signMessage('hello world', priv); WalletService.getInstanceWithAuth({ copayerId: wallet.copayers[0].id, message: 'hello world', signature: sig, clientVersion: 'bwc-0.0.1', }, function(err, server) { should.not.exist(err); server.walletId.should.equal(wallet.id); server.copayerId.should.equal(wallet.copayers[0].id); server.clientVersion.should.equal('bwc-0.0.1'); done(); }); }); }); it('should fail when requesting for non-existent copayer', function(done) { var message = 'hello world'; var opts = { copayerId: 'dummy', message: message, signature: WalletUtils.signMessage(message, TestData.copayers[0].privKey_1H_0), }; WalletService.getInstanceWithAuth(opts, function(err, server) { err.code.should.equal('NOT_AUTHORIZED'); err.message.should.contain('Copayer not found'); done(); }); }); it('should fail when message signature cannot be verified', function(done) { helpers.createAndJoinWallet(1, 2, function(s, wallet) { WalletService.getInstanceWithAuth({ copayerId: wallet.copayers[0].id, message: 'dummy', signature: 'dummy', }, function(err, server) { err.code.should.equal('NOT_AUTHORIZED'); err.message.should.contain('Invalid signature'); done(); }); }); }); }); describe('#createWallet', function() { var server; beforeEach(function() { server = new WalletService(); }); it('should create and store wallet', function(done) { var opts = { name: 'my wallet', m: 2, n: 3, pubKey: TestData.keyPair.pub, }; server.createWallet(opts, function(err, walletId) { should.not.exist(err); server.storage.fetchWallet(walletId, function(err, wallet) { should.not.exist(err); wallet.id.should.equal(walletId); wallet.name.should.equal('my wallet'); done(); }); }); }); it('should create wallet with given id', function(done) { var opts = { name: 'my wallet', m: 2, n: 3, pubKey: TestData.keyPair.pub, id: '1234', }; server.createWallet(opts, function(err, walletId) { should.not.exist(err); server.storage.fetchWallet('1234', function(err, wallet) { should.not.exist(err); wallet.id.should.equal(walletId); wallet.name.should.equal('my wallet'); done(); }); }); }); it('should fail to create 
wallets with same id', function(done) { var opts = { name: 'my wallet', m: 2, n: 3, pubKey: TestData.keyPair.pub, id: '1234', }; server.createWallet(opts, function(err, walletId) { server.createWallet(opts, function(err, walletId) { err.message.should.contain('Wallet already exists'); done(); }); }); }); it('should fail to create wallet with no name', function(done) { var opts = { name: '', m: 2, n: 3, pubKey: TestData.keyPair.pub, }; server.createWallet(opts, function(err, walletId) { should.not.exist(walletId); should.exist(err); err.message.should.contain('name'); done(); }); }); it('should fail to create wallet with invalid copayer pairs', function(done) { var invalidPairs = [{ m: 0, n: 0 }, { m: 0, n: 2 }, { m: 2, n: 1 }, { m: 0, n: 10 }, { m: 1, n: 20 }, { m: 10, n: 10 }, ]; var opts = { id: '123', name: 'my wallet', pubKey: TestData.keyPair.pub, }; async.each(invalidPairs, function(pair, cb) { opts.m = pair.m; opts.n = pair.n; server.createWallet(opts, function(err) { should.exist(err); err.message.should.equal('Invalid combination of required copayers / total copayers'); return cb(); }); }, function(err) { done(); }); }); it('should fail to create wallet with invalid pubKey argument', function(done) { var opts = { name: 'my wallet', m: 2, n: 3, pubKey: 'dummy', }; server.createWallet(opts, function(err, walletId) { should.not.exist(walletId); should.exist(err); err.message.should.contain('Invalid public key'); done(); }); }); }); describe('#joinWallet', function() { var server, walletId; beforeEach(function(done) { server = new WalletService(); var walletOpts = { name: 'my wallet', m: 1, n: 2, pubKey: TestData.keyPair.pub, }; server.createWallet(walletOpts, function(err, wId) { should.not.exist(err); walletId = wId; should.exist(walletId); done(); }); }); it('should join existing wallet', function(done) { var copayerOpts = helpers.getSignedCopayerOpts({ walletId: walletId, name: 'me', xPubKey: TestData.copayers[0].xPubKey_44H_0H_0H, requestPubKey: TestData.copayers[0].pubKey_1H_0, customData: 'dummy custom data', }); server.joinWallet(copayerOpts, function(err, result) { should.not.exist(err); var copayerId = result.copayerId; helpers.getAuthServer(copayerId, function(server) { server.getWallet({}, function(err, wallet) { wallet.id.should.equal(walletId); wallet.copayers.length.should.equal(1); var copayer = wallet.copayers[0]; copayer.name.should.equal('me'); copayer.id.should.equal(copayerId); copayer.customData.should.equal('dummy custom data'); server.getNotifications({}, function(err, notifications) { should.not.exist(err); var notif = _.find(notifications, { type: 'NewCopayer' }); should.exist(notif); notif.data.walletId.should.equal(walletId); notif.data.copayerId.should.equal(copayerId); notif.data.copayerName.should.equal('me'); notif = _.find(notifications, { type: 'WalletComplete' }); should.not.exist(notif); done(); }); }); }); }); }); it('should fail to join with no name', function(done) { var copayerOpts = helpers.getSignedCopayerOpts({ walletId: walletId, name: '', xPubKey: TestData.copayers[0].xPubKey_44H_0H_0H, requestPubKey: TestData.copayers[0].pubKey_1H_0, }); server.joinWallet(copayerOpts, function(err, result) { should.not.exist(result); should.exist(err); err.message.should.contain('name'); done(); }); }); it('should fail to join non-existent wallet', function(done) { var copayerOpts = { walletId: '123', name: 'me', xPubKey: 'dummy', requestPubKey: 'dummy', copayerSignature: 'dummy', }; server.joinWallet(copayerOpts, function(err) { should.exist(err); done(); 
}); }); it('should fail to join full wallet', function(done) { helpers.createAndJoinWallet(1, 1, function(s, wallet) { var copayerOpts = helpers.getSignedCopayerOpts({ walletId: wallet.id, name: 'me', xPubKey: TestData.copayers[1].xPubKey_44H_0H_0H, requestPubKey: TestData.copayers[1].pubKey_1H_0, }); server.joinWallet(copayerOpts, function(err) { should.exist(err); err.code.should.equal('WALLET_FULL'); err.message.should.equal('Wallet full'); done(); }); }); }); it('should return copayer in wallet error before full wallet', function(done) { helpers.createAndJoinWallet(1, 1, function(s, wallet) { var copayerOpts = helpers.getSignedCopayerOpts({ walletId: wallet.id, name: 'me', xPubKey: TestData.copayers[0].xPubKey_44H_0H_0H, requestPubKey: TestData.copayers[0].pubKey_1H_0, }); server.joinWallet(copayerOpts, function(err) { should.exist(err); err.code.should.equal('COPAYER_IN_WALLET'); done(); }); }); }); it('should fail to re-join wallet', function(done) { var copayerOpts = helpers.getSignedCopayerOpts({ walletId: walletId, name: 'me', xPubKey: TestData.copayers[0].xPubKey_44H_0H_0H, requestPubKey: TestData.copayers[0].pubKey_1H_0, }); server.joinWallet(copayerOpts, function(err) { should.not.exist(err); server.joinWallet(copayerOpts, function(err) { should.exist(err); err.code.should.equal('COPAYER_IN_WALLET'); err.message.should.equal('Copayer already in wallet'); done(); }); }); }); it('should be able to get wallet info without actually joining', function(done) { var copayerOpts = helpers.getSignedCopayerOpts({ walletId: walletId, name: 'me', xPubKey: TestData.copayers[0].xPubKey_44H_0H_0H, requestPubKey: TestData.copayers[0].pubKey_1H_0, customData: 'dummy custom data', dryRun: true, }); server.joinWallet(copayerOpts, function(err, result) { should.not.exist(err); should.exist(result); should.not.exist(result.copayerId); result.wallet.id.should.equal(walletId); result.wallet.m.should.equal(1); result.wallet.n.should.equal(2); result.wallet.copayers.should.be.empty; server.storage.fetchWallet(walletId, function(err, wallet) { should.not.exist(err); wallet.id.should.equal(walletId); wallet.copayers.should.be.empty; done(); }); }); }); it('should fail to join two wallets with same xPubKey', function(done) { var copayerOpts = helpers.getSignedCopayerOpts({ walletId: walletId, name: 'me', xPubKey: TestData.copayers[0].xPubKey_44H_0H_0H, requestPubKey: TestData.copayers[0].pubKey_1H_0, }); server.joinWallet(copayerOpts, function(err) { should.not.exist(err); var walletOpts = { name: 'my other wallet', m: 1, n: 1, pubKey: TestData.keyPair.pub, }; server.createWallet(walletOpts, function(err, walletId) { should.not.exist(err); copayerOpts = helpers.getSignedCopayerOpts({ walletId: walletId, name: 'me', xPubKey: TestData.copayers[0].xPubKey_44H_0H_0H, requestPubKey: TestData.copayers[0].pubKey_1H_0, }); server.joinWallet(copayerOpts, function(err) { should.exist(err); err.code.should.equal('COPAYER_REGISTERED'); err.message.should.equal('Copayer ID already registered on server'); done(); }); }); }); }); it('should fail to join with bad formated signature', function(done) { var copayerOpts = { walletId: walletId, name: 'me', xPubKey: TestData.copayers[0].xPubKey_44H_0H_0H, requestPubKey: TestData.copayers[0].pubKey_1H_0, copayerSignature: 'bad sign', }; server.joinWallet(copayerOpts, function(err) { err.message.should.equal('Bad request'); done(); }); }); it('should fail to join with null signature', function(done) { var copayerOpts = { walletId: walletId, name: 'me', xPubKey: 
TestData.copayers[0].xPubKey_44H_0H_0H, requestPubKey: TestData.copayers[0].pubKey_1H_0, }; server.joinWallet(copayerOpts, function(err) { should.exist(err); err.message.should.contain('argument missing'); done(); }); }); it('should fail to join with wrong signature', function(done) { var copayerOpts = helpers.getSignedCopayerOpts({ walletId: walletId, name: 'me', xPubKey: TestData.copayers[0].xPubKey_44H_0H_0H, requestPubKey: TestData.copayers[0].pubKey_1H_0, }); copayerOpts.name = 'me2'; server.joinWallet(copayerOpts, function(err) { err.message.should.equal('Bad request'); done(); }); }); it('should set pkr and status = complete on last copayer joining (2-3)', function(done) { helpers.createAndJoinWallet(2, 3, function(server) { server.getWallet({}, function(err, wallet) { should.not.exist(err); wallet.status.should.equal('complete'); wallet.publicKeyRing.length.should.equal(3); server.getNotifications({}, function(err, notifications) { should.not.exist(err); var notif = _.find(notifications, { type: 'WalletComplete' }); should.exist(notif); notif.data.walletId.should.equal(wallet.id); done(); }); }); }); }); it('should not notify WalletComplete if 1-of-1', function(done) { helpers.createAndJoinWallet(1, 1, function(server) { server.getNotifications({}, function(err, notifications) { should.not.exist(err); var notif = _.find(notifications, { type: 'WalletComplete' }); should.not.exist(notif); done(); }); }); }); }); describe('#joinWallet new/legacy clients', function() { var server; beforeEach(function() { server = new WalletService(); }); it('should fail to join legacy wallet from new client', function(done) { var walletOpts = { name: 'my wallet', m: 1, n: 2, pubKey: TestData.keyPair.pub, supportBIP44AndP2PKH: false, }; server.createWallet(walletOpts, function(err, walletId) { should.not.exist(err); should.exist(walletId); var copayerOpts = helpers.getSignedCopayerOpts({ walletId: walletId, name: 'me', xPubKey: TestData.copayers[0].xPubKey_44H_0H_0H, requestPubKey: TestData.copayers[0].pubKey_1H_0, }); server.joinWallet(copayerOpts, function(err, result) { should.exist(err); err.message.should.contain('The wallet you are trying to join was created with an older version of the client app'); done(); }); }); }); it('should fail to join new wallet from legacy client', function(done) { var walletOpts = { name: 'my wallet', m: 1, n: 2, pubKey: TestData.keyPair.pub, }; server.createWallet(walletOpts, function(err, walletId) { should.not.exist(err); should.exist(walletId); var copayerOpts = helpers.getSignedCopayerOpts({ walletId: walletId, name: 'me', xPubKey: TestData.copayers[0].xPubKey_45H, requestPubKey: TestData.copayers[0].pubKey_1H_0, supportBIP44AndP2PKH: false, }); server.joinWallet(copayerOpts, function(err, result) { should.exist(err); err.code.should.equal('UPGRADE_NEEDED'); done(); }); }); }); }); describe('Address derivation strategy', function() { var server; beforeEach(function() { server = WalletService.getInstance(); }); it('should use BIP44 & P2PKH for 1-of-1 wallet if supported', function(done) { var walletOpts = { name: 'my wallet', m: 1, n: 1, pubKey: TestData.keyPair.pub, }; server.createWallet(walletOpts, function(err, wid) { should.not.exist(err); server.storage.fetchWallet(wid, function(err, wallet) { should.not.exist(err); wallet.derivationStrategy.should.equal('BIP44'); wallet.addressType.should.equal('P2PKH'); done(); }); }); }); it('should use BIP45 & P2SH for 1-of-1 wallet if not supported', function(done) { var walletOpts = { name: 'my wallet', m: 1, n: 1, 
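        // Setting supportBIP44AndP2PKH to false below presumably forces the legacy
        // BIP45/P2SH derivation that this test asserts.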
        pubKey: TestData.keyPair.pub,
        supportBIP44AndP2PKH: false,
      };
      server.createWallet(walletOpts, function(err, wid) {
        should.not.exist(err);
        server.storage.fetchWallet(wid, function(err, wallet) {
          should.not.exist(err);
          wallet.derivationStrategy.should.equal('BIP45');
          wallet.addressType.should.equal('P2SH');
          done();
        });
      });
    });
    it('should use BIP44 & P2SH for shared wallet if supported', function(done) {
      var walletOpts = {
        name: 'my wallet',
        m: 2,
        n: 3,
        pubKey: TestData.keyPair.pub,
      };
      server.createWallet(walletOpts, function(err, wid) {
        should.not.exist(err);
        server.storage.fetchWallet(wid, function(err, wallet) {
          should.not.exist(err);
          wallet.derivationStrategy.should.equal('BIP44');
          wallet.addressType.should.equal('P2SH');
          done();
        });
      });
    });
    it('should use BIP45 & P2SH for shared wallet if not supported', function(done) {
      var walletOpts = {
        name: 'my wallet',
        m: 2,
        n: 3,
        pubKey: TestData.keyPair.pub,
        supportBIP44AndP2PKH: false,
      };
      server.createWallet(walletOpts, function(err, wid) {
        should.not.exist(err);
        server.storage.fetchWallet(wid, function(err, wallet) {
          should.not.exist(err);
          wallet.derivationStrategy.should.equal('BIP45');
          wallet.addressType.should.equal('P2SH');
          done();
        });
      });
    });
  });

  describe('#getStatus', function() {
    var server, wallet;
    beforeEach(function(done) {
      helpers.createAndJoinWallet(1, 2, function(s, w) {
        server = s;
        wallet = w;
        done();
      });
    });
    it('should get status', function(done) {
      server.getStatus({}, function(err, status) {
        should.not.exist(err);
        should.exist(status);
        should.exist(status.wallet);
        status.wallet.name.should.equal(wallet.name);
        should.exist(status.wallet.copayers);
        status.wallet.copayers.length.should.equal(2);
        should.exist(status.balance);
        status.balance.totalAmount.should.equal(0);
        should.exist(status.preferences);
        should.exist(status.pendingTxps);
        status.pendingTxps.should.be.empty;
        should.not.exist(status.wallet.publicKeyRing);
        should.not.exist(status.wallet.pubKey);
        should.not.exist(status.wallet.addressManager);
        _.each(status.wallet.copayers, function(copayer) {
          should.not.exist(copayer.xPubKey);
          should.not.exist(copayer.requestPubKey);
          should.not.exist(copayer.signature);
          should.not.exist(copayer.addressManager);
          should.not.exist(copayer.customData);
        });
        done();
      });
    });
    it('should get status including extended info', function(done) {
      server.getStatus({ includeExtendedInfo: true }, function(err, status) {
        should.not.exist(err);
        should.exist(status);
        should.exist(status.wallet.publicKeyRing);
        should.exist(status.wallet.pubKey);
        should.exist(status.wallet.addressManager);
        should.exist(status.wallet.copayers[0].xPubKey);
        should.exist(status.wallet.copayers[0].requestPubKey);
        should.exist(status.wallet.copayers[0].signature);
        should.exist(status.wallet.copayers[0].customData);
        // Do not return other copayer's custom data
        _.each(_.rest(status.wallet.copayers), function(copayer) {
          should.not.exist(copayer.customData);
        });
        done();
      });
    });
    it('should get status after tx creation', function(done) {
      helpers.stubUtxos(server, wallet, [100, 200], function() {
        var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 80, TestData.copayers[0].privKey_1H_0, { message: 'some message' });
        server.createTx(txOpts, function(err, tx) {
          should.not.exist(err);
          should.exist(tx);
          server.getStatus({}, function(err, status) {
            should.not.exist(err);
            status.pendingTxps.length.should.equal(1);
            var balance = status.balance;
balance.totalAmount.should.equal(helpers.toSatoshi(300)); balance.lockedAmount.should.equal(tx.inputs[0].satoshis); balance.availableAmount.should.equal(balance.totalAmount - balance.lockedAmount); done(); }); }); }); }); }); describe('#verifyMessageSignature', function() { var server, wallet; beforeEach(function(done) { helpers.createAndJoinWallet(2, 3, function(s, w) { server = s; wallet = w; done(); }); }); it('should successfully verify message signature', function(done) { var message = 'hello world'; var opts = { message: message, signature: WalletUtils.signMessage(message, TestData.copayers[0].privKey_1H_0), }; server.verifyMessageSignature(opts, function(err, isValid) { should.not.exist(err); isValid.should.be.true; done(); }); }); it('should fail to verify message signature for different copayer', function(done) { var message = 'hello world'; var opts = { message: message, signature: WalletUtils.signMessage(message, TestData.copayers[0].privKey_1H_0), }; helpers.getAuthServer(wallet.copayers[1].id, function(server) { server.verifyMessageSignature(opts, function(err, isValid) { should.not.exist(err); isValid.should.be.false; done(); }); }); }); }); describe('#createAddress', function() { var server, wallet; describe('shared wallets (BIP45)', function() { beforeEach(function(done) { helpers.createAndJoinWallet(2, 2, { supportBIP44AndP2PKH: false }, function(s, w) { server = s; wallet = w; done(); }); }); it('should create address', function(done) { server.createAddress({}, function(err, address) { should.not.exist(err); should.exist(address); address.walletId.should.equal(wallet.id); address.network.should.equal('livenet'); address.address.should.equal('3BVJZ4CYzeTtawDtgwHvWV5jbvnXtYe97i'); address.isChange.should.be.false; address.path.should.equal('m/2147483647/0/0'); address.type.should.equal('P2SH'); server.getNotifications({}, function(err, notifications) { should.not.exist(err); var notif = _.find(notifications, { type: 'NewAddress' }); should.exist(notif); notif.data.address.should.equal(address.address); done(); }); }); }); it('should protect against storing same address multiple times', function(done) { server.createAddress({}, function(err, address) { should.not.exist(err); should.exist(address); delete address._id; server.storage.storeAddressAndWallet(wallet, address, function(err) { should.not.exist(err); server.getMainAddresses({}, function(err, addresses) { should.not.exist(err); addresses.length.should.equal(1); done(); }); }); }); }); it('should create many addresses on simultaneous requests', function(done) { var N = 5; async.map(_.range(N), function(i, cb) { server.createAddress({}, cb); }, function(err, addresses) { addresses.length.should.equal(N); _.each(_.range(N), function(i) { addresses[i].path.should.equal('m/2147483647/0/' + i); }); // No two identical addresses _.uniq(_.pluck(addresses, 'address')).length.should.equal(N); done(); }); }); }); describe('shared wallets (BIP44)', function() { beforeEach(function(done) { helpers.createAndJoinWallet(2, 2, function(s, w) { server = s; wallet = w; done(); }); }); it('should create address', function(done) { server.createAddress({}, function(err, address) { should.not.exist(err); should.exist(address); address.walletId.should.equal(wallet.id); address.network.should.equal('livenet'); address.address.should.equal('36q2G5FMGvJbPgAVEaiyAsFGmpkhPKwk2r'); address.isChange.should.be.false; address.path.should.equal('m/0/0'); address.type.should.equal('P2SH'); server.getNotifications({}, function(err, notifications) { 
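          // A NewAddress notification should have been emitted for the address created above.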
should.not.exist(err); var notif = _.find(notifications, { type: 'NewAddress' }); should.exist(notif); notif.data.address.should.equal(address.address); done(); }); }); }); it('should create many addresses on simultaneous requests', function(done) { var N = 5; async.map(_.range(N), function(i, cb) { server.createAddress({}, cb); }, function(err, addresses) { addresses.length.should.equal(N); _.each(_.range(N), function(i) { addresses[i].path.should.equal('m/0/' + i); }); // No two identical addresses _.uniq(_.pluck(addresses, 'address')).length.should.equal(N); done(); }); }); it('should not create address if unable to store it', function(done) { sinon.stub(server.storage, 'storeAddressAndWallet').yields('dummy error'); server.createAddress({}, function(err, address) { should.exist(err); should.not.exist(address); server.getMainAddresses({}, function(err, addresses) { addresses.length.should.equal(0); server.storage.storeAddressAndWallet.restore(); server.createAddress({}, function(err, address) { should.not.exist(err); should.exist(address); done(); }); }); }); }); }); describe('1-of-1 (BIP44 & P2PKH)', function() { beforeEach(function(done) { helpers.createAndJoinWallet(1, 1, function(s, w) { server = s; wallet = w; w.copayers[0].id.should.equal(TestData.copayers[0].id44); done(); }); }); it('should create address', function(done) { server.createAddress({}, function(err, address) { should.not.exist(err); should.exist(address); address.walletId.should.equal(wallet.id); address.network.should.equal('livenet'); address.address.should.equal('1L3z9LPd861FWQhf3vDn89Fnc9dkdBo2CG'); address.isChange.should.be.false; address.path.should.equal('m/0/0'); address.type.should.equal('P2PKH'); server.getNotifications({}, function(err, notifications) { should.not.exist(err); var notif = _.find(notifications, { type: 'NewAddress' }); should.exist(notif); notif.data.address.should.equal(address.address); done(); }); }); }); it('should create many addresses on simultaneous requests', function(done) { var N = 5; async.map(_.range(N), function(i, cb) { server.createAddress({}, cb); }, function(err, addresses) { addresses.length.should.equal(N); _.each(_.range(N), function(i) { addresses[i].path.should.equal('m/0/' + i); }); // No two identical addresses _.uniq(_.pluck(addresses, 'address')).length.should.equal(N); done(); }); }); }); }); describe('Preferences', function() { var server, wallet; beforeEach(function(done) { helpers.createAndJoinWallet(2, 2, function(s, w) { server = s; wallet = w; done(); }); }); it('should save & retrieve preferences', function(done) { server.savePreferences({ email: '[email protected]', language: 'es', unit: 'bit', dummy: 'ignored', }, function(err) { should.not.exist(err); server.getPreferences({}, function(err, preferences) { should.not.exist(err); should.exist(preferences); preferences.email.should.equal('[email protected]'); preferences.language.should.equal('es'); preferences.unit.should.equal('bit'); should.not.exist(preferences.dummy); done(); }); }); }); it('should save preferences only for requesting copayer', function(done) { server.savePreferences({ email: '[email protected]' }, function(err) { should.not.exist(err); helpers.getAuthServer(wallet.copayers[1].id, function(server2) { server2.getPreferences({}, function(err, preferences) { should.not.exist(err); should.not.exist(preferences.email); done(); }); }); }); }); it('should save preferences incrementally', function(done) { async.series([ function(next) { server.savePreferences({ email: '[email protected]', }, 
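          // First step: save only the email; later steps set and clear other fields
          // to verify that preferences are updated incrementally.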
next); }, function(next) { server.getPreferences({}, function(err, preferences) { should.not.exist(err); should.exist(preferences); preferences.email.should.equal('[email protected]'); should.not.exist(preferences.language); next(); }); }, function(next) { server.savePreferences({ language: 'es', }, next); }, function(next) { server.getPreferences({}, function(err, preferences) { should.not.exist(err); should.exist(preferences); preferences.language.should.equal('es'); preferences.email.should.equal('[email protected]'); next(); }); }, function(next) { server.savePreferences({ language: null, unit: 'bit', }, next); }, function(next) { server.getPreferences({}, function(err, preferences) { should.not.exist(err); should.exist(preferences); preferences.unit.should.equal('bit'); should.not.exist(preferences.language); preferences.email.should.equal('[email protected]'); next(); }); }, ], function(err) { should.not.exist(err); done(); }); }); it.skip('should save preferences only for requesting wallet', function(done) {}); it('should validate entries', function(done) { var invalid = [{ preferences: { email: ' ', }, expected: 'email' }, { preferences: { email: 'dummy@' + _.repeat('domain', 50), }, expected: 'email' }, { preferences: { language: 'xxxxx', }, expected: 'language' }, { preferences: { language: 123, }, expected: 'language' }, { preferences: { unit: 'xxxxx', }, expected: 'unit' }, ]; async.each(invalid, function(item, next) { server.savePreferences(item.preferences, function(err) { should.exist(err); err.message.should.contain(item.expected); next(); }); }, done); }); }); describe('#getUtxos', function() { var server, wallet; beforeEach(function(done) { helpers.createAndJoinWallet(1, 1, function(s, w) { server = s; wallet = w; done(); }); }); it('should get UTXOs for wallet addresses', function(done) { helpers.stubUtxos(server, wallet, [1, 2], function() { server.getUtxos({}, function(err, utxos) { should.not.exist(err); should.exist(utxos); utxos.length.should.equal(2); _.sum(utxos, 'satoshis').should.equal(3 * 1e8); server.getMainAddresses({}, function(err, addresses) { var utxo = utxos[0]; var address = _.find(addresses, { address: utxo.address }); should.exist(address); utxo.path.should.equal(address.path); utxo.publicKeys.should.deep.equal(address.publicKeys); done(); }); }); }); }); it('should get UTXOs for specific addresses', function(done) { helpers.stubUtxos(server, wallet, [1, 2, 3], function(utxos) { _.uniq(utxos, 'address').length.should.be.above(1); var address = utxos[0].address; var amount = _.sum(_.filter(utxos, { address: address }), 'satoshis'); server.getUtxos({ addresses: [address] }, function(err, utxos) { should.not.exist(err); should.exist(utxos); _.sum(utxos, 'satoshis').should.equal(amount); done(); }); }); }); }); describe('Multiple request Pub Keys', function() { var server, wallet; var opts, reqPrivKey, ws; var getAuthServer = function(copayerId, privKey, cb) { var msg = 'dummy'; var sig = WalletUtils.signMessage(msg, privKey); WalletService.getInstanceWithAuth({ copayerId: copayerId, message: msg, signature: sig, clientVersion: CLIENT_VERSION, }, function(err, server) { return cb(err, server); }); }; beforeEach(function() { reqPrivKey = new Bitcore.PrivateKey(); var requestPubKey = reqPrivKey.toPublicKey(); var xPrivKey = TestData.copayers[0].xPrivKey_44H_0H_0H; var sig = WalletUtils.signRequestPubKey(requestPubKey, xPrivKey); var copayerId = WalletUtils.xPubToCopayerId(TestData.copayers[0].xPubKey_44H_0H_0H); opts = { copayerId: copayerId, requestPubKey: 
requestPubKey, signature: sig, }; ws = new WalletService(); }); describe('#addAccess 1-1', function() { beforeEach(function(done) { helpers.createAndJoinWallet(1, 1, function(s, w) { server = s; wallet = w; helpers.stubUtxos(server, wallet, 1, function() { done(); }); }); }); it('should be able to re-gain access from xPrivKey', function(done) { ws.addAccess(opts, function(err, res) { should.not.exist(err); res.wallet.copayers[0].requestPubKeys.length.should.equal(2); res.wallet.copayers[0].requestPubKeys[0].selfSigned.should.equal(true); server.getBalance(res.wallet.walletId, function(err, bal) { should.not.exist(err); bal.totalAmount.should.equal(1e8); getAuthServer(opts.copayerId, reqPrivKey, function(err, server2) { server2.getBalance(res.wallet.walletId, function(err, bal2) { should.not.exist(err); bal2.totalAmount.should.equal(1e8); done(); }); }); }); }); }); it('should fail to gain access with wrong xPrivKey', function(done) { opts.signature = 'xx'; ws.addAccess(opts, function(err, res) { err.code.should.equal('NOT_AUTHORIZED'); done(); }); }); it('should fail to access with wrong privkey after gaining access', function(done) { ws.addAccess(opts, function(err, res) { should.not.exist(err); server.getBalance(res.wallet.walletId, function(err, bal) { should.not.exist(err); var privKey = new Bitcore.PrivateKey(); (getAuthServer(opts.copayerId, privKey, function(err, server2) { err.code.should.equal('NOT_AUTHORIZED'); done(); })); }); }); }); it('should be able to create TXs after regaining access', function(done) { ws.addAccess(opts, function(err, res) { should.not.exist(err); getAuthServer(opts.copayerId, reqPrivKey, function(err, server2) { var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 0.8, reqPrivKey); server2.createTx(txOpts, function(err, tx) { should.not.exist(err); done(); }); }); }); }); }); describe('#addAccess 2-2', function() { beforeEach(function(done) { helpers.createAndJoinWallet(2, 2, function(s, w) { server = s; wallet = w; helpers.stubUtxos(server, wallet, 1, function() { done(); }); }); }); it('should be able to re-gain access from xPrivKey', function(done) { ws.addAccess(opts, function(err, res) { should.not.exist(err); server.getBalance(res.wallet.walletId, function(err, bal) { should.not.exist(err); bal.totalAmount.should.equal(1e8); getAuthServer(opts.copayerId, reqPrivKey, function(err, server2) { server2.getBalance(res.wallet.walletId, function(err, bal2) { should.not.exist(err); bal2.totalAmount.should.equal(1e8); done(); }); }); }); }); }); it('TX proposals should include info to be verified', function(done) { ws.addAccess(opts, function(err, res) { should.not.exist(err); getAuthServer(opts.copayerId, reqPrivKey, function(err, server2) { var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 0.8, reqPrivKey); server2.createTx(txOpts, function(err, tx) { should.not.exist(err); server2.getPendingTxs({}, function(err, txs) { should.not.exist(err); should.exist(txs[0].proposalSignaturePubKey); should.exist(txs[0].proposalSignaturePubKeySig); done(); }); }); }); }); }); }); }); describe('#getBalance', function() { var server, wallet; beforeEach(function(done) { helpers.createAndJoinWallet(1, 1, function(s, w) { server = s; wallet = w; done(); }); }); it('should get balance', function(done) { helpers.stubUtxos(server, wallet, [1, 'u2', 3], function() { server.getBalance({}, function(err, balance) { should.not.exist(err); should.exist(balance); balance.totalAmount.should.equal(helpers.toSatoshi(6)); 
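          // Stubbed UTXOs: 1 + 3 BTC confirmed plus 2 BTC unconfirmed ('u2'), i.e. 6 BTC total,
          // of which only 4 BTC should show up in the *Confirmed* fields below.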
balance.lockedAmount.should.equal(0); balance.availableAmount.should.equal(helpers.toSatoshi(6)); balance.totalBytesToSendMax.should.equal(578); balance.totalConfirmedAmount.should.equal(helpers.toSatoshi(4)); balance.lockedConfirmedAmount.should.equal(0); balance.availableConfirmedAmount.should.equal(helpers.toSatoshi(4)); should.exist(balance.byAddress); balance.byAddress.length.should.equal(2); balance.byAddress[0].amount.should.equal(helpers.toSatoshi(4)); balance.byAddress[1].amount.should.equal(helpers.toSatoshi(2)); server.getMainAddresses({}, function(err, addresses) { should.not.exist(err); var addresses = _.uniq(_.pluck(addresses, 'address')); _.intersection(addresses, _.pluck(balance.byAddress, 'address')).length.should.equal(2); done(); }); }); }); }); it('should get balance when there are no addresses', function(done) { server.getBalance({}, function(err, balance) { should.not.exist(err); should.exist(balance); balance.totalAmount.should.equal(0); balance.lockedAmount.should.equal(0); balance.availableAmount.should.equal(0); balance.totalBytesToSendMax.should.equal(0); should.exist(balance.byAddress); balance.byAddress.length.should.equal(0); done(); }); }); it('should get balance when there are no funds', function(done) { blockchainExplorer.getUnspentUtxos = sinon.stub().callsArgWith(1, null, []); server.createAddress({}, function(err, address) { should.not.exist(err); server.getBalance({}, function(err, balance) { should.not.exist(err); should.exist(balance); balance.totalAmount.should.equal(0); balance.lockedAmount.should.equal(0); balance.availableAmount.should.equal(0); balance.totalBytesToSendMax.should.equal(0); should.exist(balance.byAddress); balance.byAddress.length.should.equal(0); done(); }); }); }); it('should only include addresses with balance', function(done) { helpers.stubUtxos(server, wallet, 1, function(utxos) { server.createAddress({}, function(err, address) { should.not.exist(err); server.getBalance({}, function(err, balance) { should.not.exist(err); balance.byAddress.length.should.equal(1); balance.byAddress[0].amount.should.equal(helpers.toSatoshi(1)); balance.byAddress[0].address.should.equal(utxos[0].address); done(); }); }); }); }); it('should return correct kb to send max', function(done) { helpers.stubUtxos(server, wallet, _.range(1, 10, 0), function() { server.getBalance({}, function(err, balance) { should.not.exist(err); should.exist(balance); balance.totalAmount.should.equal(helpers.toSatoshi(9)); balance.lockedAmount.should.equal(0); balance.totalBytesToSendMax.should.equal(1535); done(); }); }); }); it('should fail gracefully when blockchain is unreachable', function(done) { blockchainExplorer.getUnspentUtxos = sinon.stub().callsArgWith(1, 'dummy error'); server.createAddress({}, function(err, address) { should.not.exist(err); server.getBalance({}, function(err, balance) { should.exist(err); err.toString().should.equal('dummy error'); done(); }); }); }); }); describe('#getFeeLevels', function() { var server, wallet; beforeEach(function(done) { helpers.createAndJoinWallet(1, 1, function(s, w) { server = s; wallet = w; done(); }); }); it('should get current fee levels', function(done) { helpers.stubFeeLevels({ 1: 40000, 2: 20000, 6: 18000, }); server.getFeeLevels({}, function(err, fees) { should.not.exist(err); fees = _.zipObject(_.map(fees, function(item) { return [item.level, item]; })); fees.priority.feePerKb.should.equal(40000); fees.priority.nbBlocks.should.equal(1); fees.normal.feePerKb.should.equal(20000); 
fees.normal.nbBlocks.should.equal(2); fees.economy.feePerKb.should.equal(18000); fees.economy.nbBlocks.should.equal(6); done(); }); }); it('should get default fees if network cannot be accessed', function(done) { blockchainExplorer.estimateFee = sinon.stub().yields('dummy error'); server.getFeeLevels({}, function(err, fees) { should.not.exist(err); fees = _.zipObject(_.map(fees, function(item) { return [item.level, item.feePerKb]; })); fees.priority.should.equal(50000); fees.normal.should.equal(20000); fees.economy.should.equal(10000); done(); }); }); it('should get default fees if network cannot estimate (returns -1)', function(done) { helpers.stubFeeLevels({ 1: -1, 2: 18000, 6: 0, }); server.getFeeLevels({}, function(err, fees) { should.not.exist(err); fees = _.zipObject(_.map(fees, function(item) { return [item.level, item]; })); fees.priority.feePerKb.should.equal(50000); should.not.exist(fees.priority.nbBlocks); fees.normal.feePerKb.should.equal(18000); fees.normal.nbBlocks.should.equal(2); fees.economy.feePerKb.should.equal(0); fees.economy.nbBlocks.should.equal(6); done(); }); }); }); describe('Wallet not complete tests', function() { it('should fail to create address when wallet is not complete', function(done) { var server = new WalletService(); var walletOpts = { name: 'my wallet', m: 2, n: 3, pubKey: TestData.keyPair.pub, }; server.createWallet(walletOpts, function(err, walletId) { should.not.exist(err); var copayerOpts = helpers.getSignedCopayerOpts({ walletId: walletId, name: 'me', xPubKey: TestData.copayers[0].xPubKey_45H, requestPubKey: TestData.copayers[0].pubKey_1H_0, }); server.joinWallet(copayerOpts, function(err, result) { should.not.exist(err); helpers.getAuthServer(result.copayerId, function(server) { server.createAddress({}, function(err, address) { should.not.exist(address); should.exist(err); err.code.should.equal('WALLET_NOT_COMPLETE'); err.message.should.equal('Wallet is not complete'); done(); }); }); }); }); }); it('should fail to create tx when wallet is not complete', function(done) { var server = new WalletService(); var walletOpts = { name: 'my wallet', m: 2, n: 3, pubKey: TestData.keyPair.pub, }; server.createWallet(walletOpts, function(err, walletId) { should.not.exist(err); var copayerOpts = helpers.getSignedCopayerOpts({ walletId: walletId, name: 'me', xPubKey: TestData.copayers[0].xPubKey_45H, requestPubKey: TestData.copayers[0].pubKey_1H_0, }); server.joinWallet(copayerOpts, function(err, result) { should.not.exist(err); helpers.getAuthServer(result.copayerId, function(server, wallet) { var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 80, TestData.copayers[0].privKey); server.createTx(txOpts, function(err, tx) { should.not.exist(tx); should.exist(err); err.code.should.equal('WALLET_NOT_COMPLETE'); done(); }); }); }); }); }); }); describe('#createTx', function() { var server, wallet; beforeEach(function(done) { helpers.createAndJoinWallet(2, 3, function(s, w) { server = s; wallet = w; done(); }); }); it('should create a tx', function(done) { helpers.stubUtxos(server, wallet, [100, 200], function() { var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 80, TestData.copayers[0].privKey_1H_0, { message: 'some message', customData: 'some custom data' }); server.createTx(txOpts, function(err, tx) { should.not.exist(err); should.exist(tx); tx.walletId.should.equal(wallet.id); tx.network.should.equal('livenet'); tx.creatorId.should.equal(wallet.copayers[0].id); tx.message.should.equal('some 
          message');
          tx.customData.should.equal('some custom data');
          tx.isAccepted().should.equal(false);
          tx.isRejected().should.equal(false);
          tx.amount.should.equal(helpers.toSatoshi(80));
          var estimatedFee = WalletUtils.DEFAULT_FEE_PER_KB * 400 / 1000; // fully signed tx should have about 400 bytes
          tx.fee.should.be.within(0.9 * estimatedFee, 1.1 * estimatedFee);
          server.getPendingTxs({}, function(err, txs) {
            should.not.exist(err);
            txs.length.should.equal(1);
            // creator
            txs[0].deleteLockTime.should.equal(0);
            server.getBalance({}, function(err, balance) {
              should.not.exist(err);
              balance.totalAmount.should.equal(helpers.toSatoshi(300));
              balance.lockedAmount.should.equal(tx.inputs[0].satoshis);
              balance.lockedAmount.should.be.below(balance.totalAmount);
              balance.availableAmount.should.equal(balance.totalAmount - balance.lockedAmount);
              server.storage.fetchAddresses(wallet.id, function(err, addresses) {
                should.not.exist(err);
                var change = _.filter(addresses, { isChange: true });
                change.length.should.equal(1);
                done();
              });
            });
          });
        });
      });
    });
    it('should create a tx with legacy signature', function(done) {
      helpers.stubUtxos(server, wallet, [100, 200], function() {
        var txOpts = helpers.createProposalOptsLegacy('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 80, 'some message', TestData.copayers[0].privKey_1H_0);
        server.createTx(txOpts, function(err, tx) {
          should.not.exist(err);
          should.exist(tx);
          done();
        });
      });
    });
    it('should create a tx using confirmed utxos first', function(done) {
      helpers.stubUtxos(server, wallet, [1.3, 'u0.5', 'u0.1', 1.2], function(utxos) {
        var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 1.5, TestData.copayers[0].privKey_1H_0, { message: 'some message' });
        server.createTx(txOpts, function(err, tx) {
          should.not.exist(err);
          should.exist(tx);
          tx.inputs.length.should.equal(2);
          _.difference(_.pluck(tx.inputs, 'txid'), [utxos[0].txid, utxos[3].txid]).length.should.equal(0);
          done();
        });
      });
    });
    it('should use unconfirmed utxos only when no more confirmed utxos are available', function(done) {
      helpers.stubUtxos(server, wallet, [1.3, 'u0.5', 'u0.1', 1.2], function(utxos) {
        var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 2.55, TestData.copayers[0].privKey_1H_0, { message: 'some message' });
        server.createTx(txOpts, function(err, tx) {
          should.not.exist(err);
          should.exist(tx);
          tx.inputs.length.should.equal(3);
          var txids = _.pluck(tx.inputs, 'txid');
          txids.should.contain(utxos[0].txid);
          txids.should.contain(utxos[3].txid);
          done();
        });
      });
    });
    it('should exclude unconfirmed utxos if specified', function(done) {
      helpers.stubUtxos(server, wallet, [1.3, 'u2', 'u0.1', 1.2], function(utxos) {
        var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 3, TestData.copayers[0].privKey_1H_0, { message: 'some message' });
        txOpts.excludeUnconfirmedUtxos = true;
        server.createTx(txOpts, function(err, tx) {
          should.exist(err);
          err.code.should.equal('INSUFFICIENT_FUNDS');
          err.message.should.equal('Insufficient funds');
          var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 2.5, TestData.copayers[0].privKey_1H_0, { message: 'some message' });
          txOpts.excludeUnconfirmedUtxos = true;
          server.createTx(txOpts, function(err, tx) {
            should.exist(err);
            err.code.should.equal('INSUFFICIENT_FUNDS_FOR_FEE');
            err.message.should.equal('Insufficient funds for fee');
            done();
          });
        });
      });
    });
    it('should use non-locked confirmed utxos when specified', function(done) {
      helpers.stubUtxos(server, wallet, [1.3, 'u2', 'u0.1', 1.2],
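        // Amounts prefixed with 'u' appear to denote unconfirmed UTXOs in the stub helper.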
function(utxos) { var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 1.4, TestData.copayers[0].privKey_1H_0, { message: 'some message' }); txOpts.excludeUnconfirmedUtxos = true; server.createTx(txOpts, function(err, tx) { should.not.exist(err); should.exist(tx); tx.inputs.length.should.equal(2); server.getBalance({}, function(err, balance) { should.not.exist(err); balance.lockedConfirmedAmount.should.equal(helpers.toSatoshi(2.5)); balance.availableConfirmedAmount.should.equal(0); var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 0.01, TestData.copayers[0].privKey_1H_0, { message: 'some message' }); txOpts.excludeUnconfirmedUtxos = true; server.createTx(txOpts, function(err, tx) { should.exist(err); err.code.should.equal('LOCKED_FUNDS'); done(); }); }); }); }); }); it('should fail gracefully if unable to reach the blockchain', function(done) { blockchainExplorer.getUnspentUtxos = sinon.stub().callsArgWith(1, 'dummy error'); server.createAddress({}, function(err, address) { should.not.exist(err); var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 80, TestData.copayers[0].privKey_1H_0, { message: 'some message' }); server.createTx(txOpts, function(err, tx) { should.exist(err); err.toString().should.equal('dummy error'); done(); }); }); }); it('should fail to create tx with invalid proposal signature', function(done) { helpers.stubUtxos(server, wallet, [100, 200], function() { var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 80, 'dummy'); server.createTx(txOpts, function(err, tx) { should.not.exist(tx); should.exist(err); err.message.should.equal('Invalid proposal signature'); done(); }); }); }); it('should fail to create tx with proposal signed by another copayer', function(done) { helpers.stubUtxos(server, wallet, [100, 200], function() { var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 80, TestData.copayers[1].privKey_1H_0); server.createTx(txOpts, function(err, tx) { should.not.exist(tx); should.exist(err); err.message.should.equal('Invalid proposal signature'); done(); }); }); }); it('should fail to create tx for invalid address', function(done) { helpers.stubUtxos(server, wallet, [100, 200], function() { var txOpts = helpers.createSimpleProposalOpts('invalid address', 80, TestData.copayers[0].privKey_1H_0); server.createTx(txOpts, function(err, tx) { should.exist(err); should.not.exist(tx); // may fail due to Non-base58 character, or Checksum mismatch, or other done(); }); }); }); it('should fail to create tx for address of different network', function(done) { helpers.stubUtxos(server, wallet, [100, 200], function() { var txOpts = helpers.createSimpleProposalOpts('myE38JHdxmQcTJGP1ZiX4BiGhDxMJDvLJD', 80, TestData.copayers[0].privKey_1H_0); server.createTx(txOpts, function(err, tx) { should.not.exist(tx); should.exist(err); err.code.should.equal('INCORRECT_ADDRESS_NETWORK'); err.message.should.equal('Incorrect address network'); done(); }); }); }); it('should fail to create tx for invalid amount', function(done) { var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 0, TestData.copayers[0].privKey_1H_0); server.createTx(txOpts, function(err, tx) { should.not.exist(tx); should.exist(err); err.message.should.equal('Invalid amount'); done(); }); }); it('should fail to create tx when insufficient funds', function(done) { helpers.stubUtxos(server, wallet, [100], function() { var txOpts = 
helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 120, TestData.copayers[0].privKey_1H_0); server.createTx(txOpts, function(err, tx) { should.exist(err); err.code.should.equal('INSUFFICIENT_FUNDS'); err.message.should.equal('Insufficient funds'); server.getPendingTxs({}, function(err, txs) { should.not.exist(err); txs.length.should.equal(0); server.getBalance({}, function(err, balance) { should.not.exist(err); balance.lockedAmount.should.equal(0); balance.totalAmount.should.equal(10000000000); done(); }); }); }); }); }); it('should fail to create tx when insufficient funds for fee', function(done) { helpers.stubUtxos(server, wallet, 0.048222, function() { var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 0.048200, TestData.copayers[0].privKey_1H_0); server.createTx(txOpts, function(err, tx) { should.exist(err); err.code.should.equal('INSUFFICIENT_FUNDS_FOR_FEE'); err.message.should.equal('Insufficient funds for fee'); done(); }); }); }); it('should scale fees according to tx size', function(done) { helpers.stubUtxos(server, wallet, [1, 1, 1, 1], function() { var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 3.5, TestData.copayers[0].privKey_1H_0); server.createTx(txOpts, function(err, tx) { should.not.exist(err); var estimatedFee = WalletUtils.DEFAULT_FEE_PER_KB * 1300 / 1000; // fully signed tx should have about 1300 bytes tx.fee.should.be.within(0.9 * estimatedFee, 1.1 * estimatedFee); done(); }); }); }); it('should be possible to use a smaller fee', function(done) { helpers.stubUtxos(server, wallet, 1, function() { var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 0.99995, TestData.copayers[0].privKey_1H_0, { feePerKb: 80000 }); server.createTx(txOpts, function(err, tx) { should.exist(err); err.code.should.equal('INSUFFICIENT_FUNDS_FOR_FEE'); var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 0.99995, TestData.copayers[0].privKey_1H_0, { feePerKb: 5000 }); server.createTx(txOpts, function(err, tx) { should.not.exist(err); var estimatedFee = 5000 * 400 / 1000; // fully signed tx should have about 400 bytes tx.fee.should.be.within(0.9 * estimatedFee, 1.1 * estimatedFee); // Sign it to make sure Bitcore doesn't complain about the fees var signatures = helpers.clientSign(tx, TestData.copayers[0].xPrivKey); server.signTx({ txProposalId: tx.id, signatures: signatures, }, function(err) { should.not.exist(err); done(); }); }); }); }); }); it('should fail to create tx for dust amount', function(done) { helpers.stubUtxos(server, wallet, [1], function() { var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 0.00000001, TestData.copayers[0].privKey_1H_0); server.createTx(txOpts, function(err, tx) { should.exist(err); err.code.should.equal('DUST_AMOUNT'); err.message.should.equal('Amount below dust threshold'); done(); }); }); }); it('should fail to create tx that would return change for dust amount', function(done) { helpers.stubUtxos(server, wallet, [1], function() { var fee = 4095 / 1e8; // The exact fee of the resulting tx var change = 100 / 1e8; // Below dust var amount = 1 - fee - change; var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', amount, TestData.copayers[0].privKey_1H_0, { feePerKb: 10000 }); server.createTx(txOpts, function(err, tx) { should.exist(err); err.code.should.equal('DUST_AMOUNT'); err.message.should.equal('Amount below dust threshold'); done(); }); }); }); 
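    // When pending proposals already lock every UTXO, the server should report
    // LOCKED_FUNDS rather than INSUFFICIENT_FUNDS, as the next test verifies.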
it('should fail with different error for insufficient funds and locked funds', function(done) { helpers.stubUtxos(server, wallet, [10, 10], function() { var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 11, TestData.copayers[0].privKey_1H_0); server.createTx(txOpts, function(err, tx) { should.not.exist(err); server.getBalance({}, function(err, balance) { should.not.exist(err); balance.totalAmount.should.equal(helpers.toSatoshi(20)); balance.lockedAmount.should.equal(helpers.toSatoshi(20)); txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 8, TestData.copayers[0].privKey_1H_0); server.createTx(txOpts, function(err, tx) { should.exist(err); err.code.should.equal('LOCKED_FUNDS'); err.message.should.equal('Funds are locked by pending transaction proposals'); done(); }); }); }); }); }); it('should create tx with 0 change output', function(done) { helpers.stubUtxos(server, wallet, [1], function() { var fee = 4100 / 1e8; // The exact fee of the resulting tx var amount = 1 - fee; var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', amount, TestData.copayers[0].privKey_1H_0); server.createTx(txOpts, function(err, tx) { should.not.exist(err); should.exist(tx); var bitcoreTx = tx.getBitcoreTx(); bitcoreTx.outputs.length.should.equal(1); bitcoreTx.outputs[0].satoshis.should.equal(tx.amount); done(); }); }); }); it('should fail gracefully when bitcore throws exception on raw tx creation', function(done) { helpers.stubUtxos(server, wallet, [10], function() { var bitcoreStub = sinon.stub(Bitcore, 'Transaction'); bitcoreStub.throws({ name: 'dummy', message: 'dummy exception' }); var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 2, TestData.copayers[0].privKey_1H_0); server.createTx(txOpts, function(err, tx) { should.exist(err); err.message.should.equal('dummy exception'); bitcoreStub.restore(); done(); }); }); }); it('should create tx when there is a pending tx and enough UTXOs', function(done) { helpers.stubUtxos(server, wallet, [10.1, 10.2, 10.3], function() { var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 12, TestData.copayers[0].privKey_1H_0); server.createTx(txOpts, function(err, tx) { should.not.exist(err); should.exist(tx); var txOpts2 = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 8, TestData.copayers[0].privKey_1H_0); server.createTx(txOpts2, function(err, tx) { should.not.exist(err); should.exist(tx); server.getPendingTxs({}, function(err, txs) { should.not.exist(err); txs.length.should.equal(2); server.getBalance({}, function(err, balance) { should.not.exist(err); balance.totalAmount.should.equal(3060000000); balance.lockedAmount.should.equal(3060000000); done(); }); }); }); }); }); }); it('should fail to create tx when there is a pending tx and not enough UTXOs', function(done) { helpers.stubUtxos(server, wallet, [10.1, 10.2, 10.3], function() { var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 12, TestData.copayers[0].privKey_1H_0); server.createTx(txOpts, function(err, tx) { should.not.exist(err); should.exist(tx); var txOpts2 = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 24, TestData.copayers[0].privKey_1H_0); server.createTx(txOpts2, function(err, tx) { err.code.should.equal('LOCKED_FUNDS'); should.not.exist(tx); server.getPendingTxs({}, function(err, txs) { should.not.exist(err); txs.length.should.equal(1); server.getBalance({}, 
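          // Only the inputs spent by the single pending proposal should be reported as locked;
          // the remainder stays available.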
function(err, balance) { should.not.exist(err); balance.totalAmount.should.equal(helpers.toSatoshi(30.6)); var amountInputs = _.sum(txs[0].inputs, 'satoshis'); balance.lockedAmount.should.equal(amountInputs); balance.lockedAmount.should.be.below(balance.totalAmount); balance.availableAmount.should.equal(balance.totalAmount - balance.lockedAmount); done(); }); }); }); }); }); }); it('should create tx using different UTXOs for simultaneous requests', function(done) { var N = 5; helpers.stubUtxos(server, wallet, _.range(100, 100 + N, 0), function(utxos) { server.getBalance({}, function(err, balance) { should.not.exist(err); balance.totalAmount.should.equal(helpers.toSatoshi(N * 100)); balance.lockedAmount.should.equal(0); var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 80, TestData.copayers[0].privKey_1H_0); async.map(_.range(N), function(i, cb) { server.createTx(txOpts, function(err, tx) { cb(err, tx); }); }, function(err) { server.getPendingTxs({}, function(err, txs) { should.not.exist(err); txs.length.should.equal(N); _.uniq(_.pluck(txs, 'changeAddress')).length.should.equal(N); server.getBalance({}, function(err, balance) { should.not.exist(err); balance.totalAmount.should.equal(helpers.toSatoshi(N * 100)); balance.lockedAmount.should.equal(balance.totalAmount); done(); }); }); }); }); }); }); it('should create tx for type multiple_outputs', function(done) { helpers.stubUtxos(server, wallet, [100, 200], function() { var outputs = [{ toAddress: '18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', amount: 75, message: 'message #1' }, { toAddress: '18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', amount: 75, message: 'message #2' }]; var txOpts = helpers.createProposalOpts(Model.TxProposal.Types.MULTIPLEOUTPUTS, outputs, TestData.copayers[0].privKey_1H_0, { message: 'some message' }); server.createTx(txOpts, function(err, tx) { should.not.exist(err); should.exist(tx); done(); }); }); }); it('should fail to create tx for type multiple_outputs with missing output argument', function(done) { helpers.stubUtxos(server, wallet, [100, 200], function() { var outputs = [{ amount: 80, message: 'message #1', }, { toAddress: '18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', amount: 90, message: 'message #2' }]; var txOpts = helpers.createProposalOpts(Model.TxProposal.Types.MULTIPLEOUTPUTS, outputs, TestData.copayers[0].privKey_1H_0, { message: 'some message' }); server.createTx(txOpts, function(err, tx) { should.exist(err); err.message.should.contain('outputs argument missing'); done(); }); }); }); it('should fail to create tx for unsupported proposal type', function(done) { helpers.stubUtxos(server, wallet, [100, 200], function() { var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 80, TestData.copayers[0].privKey_1H_0, { message: 'some message' }); txOpts.type = 'bogus'; server.createTx(txOpts, function(err, tx) { should.exist(err); err.message.should.contain('Invalid proposal type'); done(); }); }); }); it('should be able to send max amount', function(done) { helpers.stubUtxos(server, wallet, _.range(1, 10, 0), function() { server.getBalance({}, function(err, balance) { should.not.exist(err); balance.totalAmount.should.equal(helpers.toSatoshi(9)); balance.lockedAmount.should.equal(0); balance.availableAmount.should.equal(helpers.toSatoshi(9)); balance.totalBytesToSendMax.should.equal(2896); var fee = parseInt((balance.totalBytesToSendMax * 10000 / 1000).toFixed(0)); var max = balance.availableAmount - fee; var txOpts = 
helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', max / 1e8, TestData.copayers[0].privKey_1H_0); server.createTx(txOpts, function(err, tx) { should.not.exist(err); should.exist(tx); tx.amount.should.equal(max); var estimatedFee = 2896 * 10000 / 1000; tx.fee.should.be.within(0.9 * estimatedFee, 1.1 * estimatedFee); server.getBalance({}, function(err, balance) { should.not.exist(err); balance.lockedAmount.should.equal(helpers.toSatoshi(9)); balance.availableAmount.should.equal(0); done(); }); }); }); }); }); it('should be able to send max non-locked amount', function(done) { helpers.stubUtxos(server, wallet, _.range(1, 10, 0), function() { var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 3.5, TestData.copayers[0].privKey_1H_0); server.createTx(txOpts, function(err, tx) { should.not.exist(err); server.getBalance({}, function(err, balance) { should.not.exist(err); balance.totalAmount.should.equal(helpers.toSatoshi(9)); balance.lockedAmount.should.equal(helpers.toSatoshi(4)); balance.availableAmount.should.equal(helpers.toSatoshi(5)); balance.totalBytesToSendMax.should.equal(1653); var fee = parseInt((balance.totalBytesToSendMax * 2000 / 1000).toFixed(0)); var max = balance.availableAmount - fee; var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', max / 1e8, TestData.copayers[0].privKey_1H_0, { feePerKb: 2000 }); server.createTx(txOpts, function(err, tx) { should.not.exist(err); should.exist(tx); tx.amount.should.equal(max); var estimatedFee = 1653 * 2000 / 1000; tx.fee.should.be.within(0.9 * estimatedFee, 1.1 * estimatedFee); server.getBalance({}, function(err, balance) { should.not.exist(err); balance.lockedAmount.should.equal(helpers.toSatoshi(9)); done(); }); }); }); }); }); }); it('should not use UTXO provided in utxosToExclude option', function(done) { helpers.stubUtxos(server, wallet, [1, 2, 3], function(utxos) { var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 4.5, TestData.copayers[0].privKey_1H_0); txOpts.utxosToExclude = [utxos[1].txid + ':' + utxos[1].vout]; server.createTx(txOpts, function(err, tx) { should.exist(err); err.code.should.equal('INSUFFICIENT_FUNDS'); err.message.should.equal('Insufficient funds'); done(); }); }); }); it('should use non-excluded UTXOs', function(done) { helpers.stubUtxos(server, wallet, [1, 2], function(utxos) { var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 0.5, TestData.copayers[0].privKey_1H_0); txOpts.utxosToExclude = [utxos[0].txid + ':' + utxos[0].vout]; server.createTx(txOpts, function(err, tx) { should.not.exist(err); tx.inputs.length.should.equal(1); tx.inputs[0].txid.should.equal(utxos[1].txid); tx.inputs[0].vout.should.equal(utxos[1].vout); done(); }); }); }); }); describe('#createTx backoff time', function(done) { var server, wallet, txid; beforeEach(function(done) { helpers.createAndJoinWallet(2, 2, function(s, w) { server = s; wallet = w; helpers.stubUtxos(server, wallet, _.range(2, 6), function() { done(); }); }); }); it('should follow backoff time after consecutive rejections', function(done) { async.series([ function(next) { async.each(_.range(3), function(i, next) { var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 1, TestData.copayers[0].privKey_1H_0); server.createTx(txOpts, function(err, tx) { should.not.exist(err); server.rejectTx({ txProposalId: tx.id, reason: 'some reason', }, next); }); }, next); }, function(next) { // Allow a 4th tx var 
txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 1, TestData.copayers[0].privKey_1H_0); server.createTx(txOpts, function(err, tx) { server.rejectTx({ txProposalId: tx.id, reason: 'some reason', }, next); }); }, function(next) { // Do not allow before backoff time var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 1, TestData.copayers[0].privKey_1H_0); server.createTx(txOpts, function(err, tx) { should.exist(err); err.code.should.equal('TX_CANNOT_CREATE'); next(); }); }, function(next) { var clock = sinon.useFakeTimers(Date.now() + (WalletService.BACKOFF_TIME + 2) * 60 * 1000, 'Date'); var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 1, TestData.copayers[0].privKey_1H_0); server.createTx(txOpts, function(err, tx) { clock.restore(); server.rejectTx({ txProposalId: tx.id, reason: 'some reason', }, next); }); }, function(next) { // Do not allow a 5th tx before backoff time var clock = sinon.useFakeTimers(Date.now() + (WalletService.BACKOFF_TIME + 2) * 60 * 1000 + 1, 'Date'); var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 1, TestData.copayers[0].privKey_1H_0); server.createTx(txOpts, function(err, tx) { clock.restore(); should.exist(err); err.code.should.equal('TX_CANNOT_CREATE'); next(); }); }, ], function(err) { should.not.exist(err); done(); }); }); }); describe('#rejectTx', function() { var server, wallet, txid; beforeEach(function(done) { helpers.createAndJoinWallet(2, 2, function(s, w) { server = s; wallet = w; helpers.stubUtxos(server, wallet, _.range(1, 9), function() { var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 10, TestData.copayers[0].privKey_1H_0); server.createTx(txOpts, function(err, tx) { should.not.exist(err); should.exist(tx); txid = tx.id; done(); }); }); }); }); it('should reject a TX', function(done) { server.getPendingTxs({}, function(err, txs) { var tx = txs[0]; tx.id.should.equal(txid); server.rejectTx({ txProposalId: txid, reason: 'some reason', }, function(err) { should.not.exist(err); server.getPendingTxs({}, function(err, txs) { should.not.exist(err); txs.should.be.empty; server.getTx({ txProposalId: txid }, function(err, tx) { var actors = tx.getActors(); actors.length.should.equal(1); actors[0].should.equal(wallet.copayers[0].id); var action = tx.getActionBy(wallet.copayers[0].id); action.type.should.equal('reject'); action.comment.should.equal('some reason'); done(); }); }); }); }); }); it('should fail to reject non-pending TX', function(done) { async.waterfall([ function(next) { server.getPendingTxs({}, function(err, txs) { var tx = txs[0]; tx.id.should.equal(txid); next(); }); }, function(next) { server.rejectTx({ txProposalId: txid, reason: 'some reason', }, function(err) { should.not.exist(err); next(); }); }, function(next) { server.getPendingTxs({}, function(err, txs) { should.not.exist(err); txs.should.be.empty; next(); }); }, function(next) { helpers.getAuthServer(wallet.copayers[1].id, function(server) { server.rejectTx({ txProposalId: txid, reason: 'some other reason', }, function(err) { should.exist(err); err.code.should.equal('TX_NOT_PENDING'); done(); }); }); }, ]); }); }); describe('#signTx', function() { describe('1-of-1 (BIP44 & P2PKH)', function() { var server, wallet, txid; beforeEach(function(done) { helpers.createAndJoinWallet(1, 1, function(s, w) { server = s; wallet = w; helpers.stubUtxos(server, wallet, [1, 2], function() { var txOpts = 
helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 2.5, TestData.copayers[0].privKey_1H_0); server.createTx(txOpts, function(err, tx) { should.not.exist(err); should.exist(tx); tx.derivationStrategy.should.equal('BIP44'); tx.addressType.should.equal('P2PKH'); txid = tx.id; done(); }); }); }); }); it('should sign a TX with multiple inputs, different paths, and return raw', function(done) { blockchainExplorer.getTransaction = sinon.stub().callsArgWith(1, null, null); server.getPendingTxs({}, function(err, txs) { var tx = txs[0]; tx.id.should.equal(txid); var signatures = helpers.clientSign(tx, TestData.copayers[0].xPrivKey); should.not.exist(tx.raw); server.signTx({ txProposalId: txid, signatures: signatures, }, function(err, txp) { should.not.exist(err); txp.status.should.equal('accepted'); // The raw Tx should contain the Signatures. txp.raw.should.contain(signatures[0]); // Get pending should also contains the raw TX server.getPendingTxs({}, function(err, txs) { var tx = txs[0]; should.not.exist(err); tx.status.should.equal('accepted'); tx.raw.should.contain(signatures[0]); done(); }); }); }); }); }); describe('Multisig', function() { var server, wallet, txid; beforeEach(function(done) { helpers.createAndJoinWallet(2, 3, function(s, w) { server = s; wallet = w; helpers.stubUtxos(server, wallet, _.range(1, 9), function() { var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 20, TestData.copayers[0].privKey_1H_0); server.createTx(txOpts, function(err, tx) { should.not.exist(err); should.exist(tx); txid = tx.id; done(); }); }); }); }); it('should sign a TX with multiple inputs, different paths', function(done) { server.getPendingTxs({}, function(err, txs) { var tx = txs[0]; tx.id.should.equal(txid); var signatures = helpers.clientSign(tx, TestData.copayers[0].xPrivKey); server.signTx({ txProposalId: txid, signatures: signatures, }, function(err, txp) { should.not.exist(err); should.not.exist(tx.raw); server.getPendingTxs({}, function(err, txs) { should.not.exist(err); var tx = txs[0]; tx.id.should.equal(txid); var actors = tx.getActors(); actors.length.should.equal(1); actors[0].should.equal(wallet.copayers[0].id); tx.getActionBy(wallet.copayers[0].id).type.should.equal('accept'); done(); }); }); }); }); it('should fail to sign with a xpriv from other copayer', function(done) { server.getPendingTxs({}, function(err, txs) { var tx = txs[0]; tx.id.should.equal(txid); var signatures = helpers.clientSign(tx, TestData.copayers[1].xPrivKey); server.signTx({ txProposalId: txid, signatures: signatures, }, function(err) { err.code.should.equal('BAD_SIGNATURES'); done(); }); }); }); it('should fail if one signature is broken', function(done) { server.getPendingTxs({}, function(err, txs) { var tx = txs[0]; tx.id.should.equal(txid); var signatures = helpers.clientSign(tx, TestData.copayers[0].xPrivKey); signatures[0] = 1; server.signTx({ txProposalId: txid, signatures: signatures, }, function(err) { err.message.should.contain('signatures'); done(); }); }); }); it('should fail on invalid signature', function(done) { server.getPendingTxs({}, function(err, txs) { var tx = txs[0]; tx.id.should.equal(txid); var signatures = ['11', '22', '33', '44', '55']; server.signTx({ txProposalId: txid, signatures: signatures, }, function(err) { should.exist(err); err.message.should.contain('Bad signatures'); done(); }); }); }); it('should fail on wrong number of invalid signatures', function(done) { server.getPendingTxs({}, function(err, txs) { var tx = txs[0]; 
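// Note: this case deliberately provides one fewer signature than there are inputs, which should be rejected as 'Bad signatures'.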
tx.id.should.equal(txid); var signatures = _.take(helpers.clientSign(tx, TestData.copayers[0].xPrivKey), tx.inputs.length - 1); server.signTx({ txProposalId: txid, signatures: signatures, }, function(err) { should.exist(err); err.message.should.contain('Bad signatures'); done(); }); }); }); it('should fail when signing a TX previously rejected', function(done) { server.getPendingTxs({}, function(err, txs) { var tx = txs[0]; tx.id.should.equal(txid); var signatures = helpers.clientSign(tx, TestData.copayers[0].xPrivKey); server.signTx({ txProposalId: txid, signatures: signatures, }, function(err) { server.rejectTx({ txProposalId: txid, }, function(err) { err.code.should.contain('COPAYER_VOTED'); done(); }); }); }); }); it('should fail when rejected a previously signed TX', function(done) { server.getPendingTxs({}, function(err, txs) { var tx = txs[0]; tx.id.should.equal(txid); server.rejectTx({ txProposalId: txid, }, function(err) { var signatures = helpers.clientSign(tx, TestData.copayers[0].xPrivKey); server.signTx({ txProposalId: txid, signatures: signatures, }, function(err) { err.code.should.contain('COPAYER_VOTED'); done(); }); }); }); }); it('should fail to sign a non-pending TX', function(done) { async.waterfall([ function(next) { server.rejectTx({ txProposalId: txid, reason: 'some reason', }, function(err) { should.not.exist(err); next(); }); }, function(next) { helpers.getAuthServer(wallet.copayers[1].id, function(server) { server.rejectTx({ txProposalId: txid, reason: 'some reason', }, function(err) { should.not.exist(err); next(); }); }); }, function(next) { server.getPendingTxs({}, function(err, txs) { should.not.exist(err); txs.should.be.empty; next(); }); }, function(next) { helpers.getAuthServer(wallet.copayers[2].id, function(server) { server.getTx({ txProposalId: txid }, function(err, tx) { should.not.exist(err); var signatures = helpers.clientSign(tx, TestData.copayers[2].xPrivKey); server.signTx({ txProposalId: txid, signatures: signatures, }, function(err) { should.exist(err); err.code.should.equal('TX_NOT_PENDING'); done(); }); }); }); }, ]); }); }); }); describe('#broadcastTx & #broadcastRawTx', function() { var server, wallet, txpid, txid; beforeEach(function(done) { helpers.createAndJoinWallet(1, 1, function(s, w) { server = s; wallet = w; helpers.stubUtxos(server, wallet, [10, 10], function() { var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 9, TestData.copayers[0].privKey_1H_0, { message: 'some message' }); server.createTx(txOpts, function(err, txp) { should.not.exist(err); should.exist(txp); var signatures = helpers.clientSign(txp, TestData.copayers[0].xPrivKey); server.signTx({ txProposalId: txp.id, signatures: signatures, }, function(err, txp) { should.not.exist(err); should.exist(txp); txp.isAccepted().should.be.true; txp.isBroadcasted().should.be.false; txid = txp.txid; txpid = txp.id; done(); }); }); }); }); }); it('should broadcast a tx', function(done) { var clock = sinon.useFakeTimers(1234000, 'Date'); helpers.stubBroadcast(); server.broadcastTx({ txProposalId: txpid }, function(err) { should.not.exist(err); server.getTx({ txProposalId: txpid }, function(err, txp) { should.not.exist(err); should.not.exist(txp.raw); txp.txid.should.equal(txid); txp.isBroadcasted().should.be.true; txp.broadcastedOn.should.equal(1234); clock.restore(); done(); }); }); }); it('should broadcast a raw tx', function(done) { helpers.stubBroadcast(); server.broadcastRawTx({ network: 'testnet', rawTx: 'raw tx', }, function(err, txid) { 
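// Note: broadcastRawTx bypasses the proposal flow entirely; stubBroadcast is assumed to make the explorer call back with a txid here.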
should.not.exist(err); should.exist(txid); done(); }); }); it('should fail to brodcast a tx already marked as broadcasted', function(done) { helpers.stubBroadcast(); server.broadcastTx({ txProposalId: txpid }, function(err) { should.not.exist(err); server.broadcastTx({ txProposalId: txpid }, function(err) { should.exist(err); err.code.should.equal('TX_ALREADY_BROADCASTED'); done(); }); }); }); it('should auto process already broadcasted txs', function(done) { helpers.stubBroadcast(); server.getPendingTxs({}, function(err, txs) { should.not.exist(err); txs.length.should.equal(1); blockchainExplorer.getTransaction = sinon.stub().callsArgWith(1, null, { txid: 999 }); server.getPendingTxs({}, function(err, txs) { should.not.exist(err); txs.length.should.equal(0); done(); }); }); }); it('should process only broadcasted txs', function(done) { helpers.stubBroadcast(); var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 9, TestData.copayers[0].privKey_1H_0, { message: 'some message 2' }); server.createTx(txOpts, function(err, txp) { should.not.exist(err); server.getPendingTxs({}, function(err, txs) { should.not.exist(err); txs.length.should.equal(2); blockchainExplorer.getTransaction = sinon.stub().callsArgWith(1, null, { txid: 999 }); server.getPendingTxs({}, function(err, txs) { should.not.exist(err); txs.length.should.equal(1); txs[0].status.should.equal('pending'); should.not.exist(txs[0].txid); done(); }); }); }); }); it('should fail to brodcast a not yet accepted tx', function(done) { helpers.stubBroadcast(); var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 9, TestData.copayers[0].privKey_1H_0, { message: 'some message' }); server.createTx(txOpts, function(err, txp) { should.not.exist(err); should.exist(txp); server.broadcastTx({ txProposalId: txp.id }, function(err) { should.exist(err); err.code.should.equal('TX_NOT_ACCEPTED'); done(); }); }); }); it('should keep tx as accepted if unable to broadcast it', function(done) { blockchainExplorer.broadcast = sinon.stub().callsArgWith(1, 'broadcast error'); blockchainExplorer.getTransaction = sinon.stub().callsArgWith(1, null, null); server.broadcastTx({ txProposalId: txpid }, function(err) { should.exist(err); err.toString().should.equal('broadcast error'); server.getTx({ txProposalId: txpid }, function(err, txp) { should.not.exist(err); should.exist(txp.txid); txp.isBroadcasted().should.be.false; should.not.exist(txp.broadcastedOn); txp.isAccepted().should.be.true; done(); }); }); }); it('should mark tx as broadcasted if accepted but already in blockchain', function(done) { blockchainExplorer.broadcast = sinon.stub().callsArgWith(1, 'broadcast error'); blockchainExplorer.getTransaction = sinon.stub().callsArgWith(1, null, { txid: '999' }); server.broadcastTx({ txProposalId: txpid }, function(err) { should.not.exist(err); server.getTx({ txProposalId: txpid }, function(err, txp) { should.not.exist(err); should.exist(txp.txid); txp.isBroadcasted().should.be.true; should.exist(txp.broadcastedOn); done(); }); }); }); it('should keep tx as accepted if broadcast fails and cannot check tx in blockchain', function(done) { blockchainExplorer.broadcast = sinon.stub().callsArgWith(1, 'broadcast error'); blockchainExplorer.getTransaction = sinon.stub().callsArgWith(1, 'bc check error'); server.broadcastTx({ txProposalId: txpid }, function(err) { should.exist(err); err.toString().should.equal('bc check error'); server.getTx({ txProposalId: txpid }, function(err, txp) { should.not.exist(err); 
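// Note: the broadcast failed and the fallback blockchain lookup also errored, so the proposal must remain accepted and must not be marked as broadcasted.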
should.exist(txp.txid); txp.isBroadcasted().should.be.false; should.not.exist(txp.broadcastedOn); txp.isAccepted().should.be.true; done(); }); }); }); }); describe('Tx proposal workflow', function() { var server, wallet; beforeEach(function(done) { helpers.createAndJoinWallet(2, 3, function(s, w) { server = s; wallet = w; helpers.stubUtxos(server, wallet, _.range(1, 9), function() { helpers.stubBroadcast(); done(); }); }); }); it('other copayers should see pending proposal created by one copayer', function(done) { var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 10, TestData.copayers[0].privKey_1H_0, { message: 'some message' }); server.createTx(txOpts, function(err, txp) { should.not.exist(err); should.exist(txp); helpers.getAuthServer(wallet.copayers[1].id, function(server2, wallet) { server2.getPendingTxs({}, function(err, txps) { should.not.exist(err); txps.length.should.equal(1); txps[0].id.should.equal(txp.id); txps[0].message.should.equal('some message'); done(); }); }); }); }); it('tx proposals should not be finally accepted until quorum is reached', function(done) { var txpId; async.waterfall([ function(next) { var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 10, TestData.copayers[0].privKey_1H_0, { message: 'some message' }); server.createTx(txOpts, function(err, txp) { txpId = txp.id; should.not.exist(err); should.exist(txp); next(); }); }, function(next) { server.getPendingTxs({}, function(err, txps) { should.not.exist(err); txps.length.should.equal(1); var txp = txps[0]; txp.actions.should.be.empty; next(null, txp); }); }, function(txp, next) { var signatures = helpers.clientSign(txp, TestData.copayers[0].xPrivKey); server.signTx({ txProposalId: txpId, signatures: signatures, }, function(err) { should.not.exist(err); next(); }); }, function(next) { server.getPendingTxs({}, function(err, txps) { should.not.exist(err); txps.length.should.equal(1); var txp = txps[0]; txp.isPending().should.be.true; txp.isAccepted().should.be.false; txp.isRejected().should.be.false; txp.isBroadcasted().should.be.false; txp.actions.length.should.equal(1); var action = txp.getActionBy(wallet.copayers[0].id); action.type.should.equal('accept'); server.getNotifications({}, function(err, notifications) { should.not.exist(err); var last = _.last(notifications); last.type.should.not.equal('TxProposalFinallyAccepted'); next(null, txp); }); }); }, function(txp, next) { helpers.getAuthServer(wallet.copayers[1].id, function(server, wallet) { var signatures = helpers.clientSign(txp, TestData.copayers[1].xPrivKey); server.signTx({ txProposalId: txpId, signatures: signatures, }, function(err) { should.not.exist(err); next(); }); }); }, function(next) { server.getPendingTxs({}, function(err, txps) { should.not.exist(err); txps.length.should.equal(1); var txp = txps[0]; txp.isPending().should.be.true; txp.isAccepted().should.be.true; txp.isBroadcasted().should.be.false; should.exist(txp.txid); txp.actions.length.should.equal(2); server.getNotifications({}, function(err, notifications) { should.not.exist(err); var last = _.last(notifications); last.type.should.equal('TxProposalFinallyAccepted'); last.walletId.should.equal(wallet.id); last.creatorId.should.equal(wallet.copayers[1].id); last.data.txProposalId.should.equal(txp.id); done(); }); }); }, ]); }); it('tx proposals should accept as many rejections as possible without finally rejecting', function(done) { var txpId; async.waterfall([ function(next) { var txOpts = 
helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 10, TestData.copayers[0].privKey_1H_0, { message: 'some message' }); server.createTx(txOpts, function(err, txp) { txpId = txp.id; should.not.exist(err); should.exist(txp); next(); }); }, function(next) { server.getPendingTxs({}, function(err, txps) { should.not.exist(err); txps.length.should.equal(1); var txp = txps[0]; txp.actions.should.be.empty; next(); }); }, function(next) { server.rejectTx({ txProposalId: txpId, reason: 'just because' }, function(err) { should.not.exist(err); next(); }); }, function(next) { server.getPendingTxs({}, function(err, txps) { should.not.exist(err); txps.length.should.equal(1); var txp = txps[0]; txp.isPending().should.be.true; txp.isRejected().should.be.false; txp.isAccepted().should.be.false; txp.actions.length.should.equal(1); var action = txp.getActionBy(wallet.copayers[0].id); action.type.should.equal('reject'); action.comment.should.equal('just because'); next(); }); }, function(next) { helpers.getAuthServer(wallet.copayers[1].id, function(server, wallet) { server.rejectTx({ txProposalId: txpId, reason: 'some other reason' }, function(err) { should.not.exist(err); next(); }); }); }, function(next) { server.getPendingTxs({}, function(err, txps) { should.not.exist(err); txps.length.should.equal(0); next(); }); }, function(next) { server.getTx({ txProposalId: txpId }, function(err, txp) { should.not.exist(err); txp.isPending().should.be.false; txp.isRejected().should.be.true; txp.isAccepted().should.be.false; txp.actions.length.should.equal(2); done(); }); }, ]); }); }); describe('#getTx', function() { var server, wallet, txpid; beforeEach(function(done) { helpers.createAndJoinWallet(2, 3, function(s, w) { server = s; wallet = w; helpers.stubUtxos(server, wallet, 10, function() { var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 9, TestData.copayers[0].privKey_1H_0, { message: 'some message' }); server.createTx(txOpts, function(err, txp) { should.not.exist(err); should.exist(txp); txpid = txp.id; done(); }); }); }); }); it('should get own transaction proposal', function(done) { server.getTx({ txProposalId: txpid }, function(err, txp) { should.not.exist(err); should.exist(txp); txp.id.should.equal(txpid); done(); }); }); it('should get someone elses transaction proposal', function(done) { helpers.getAuthServer(wallet.copayers[1].id, function(server2, wallet) { server2.getTx({ txProposalId: txpid }, function(err, res) { should.not.exist(err); res.id.should.equal(txpid); done(); }); }); }); it('should fail to get non-existent transaction proposal', function(done) { server.getTx({ txProposalId: 'dummy' }, function(err, txp) { should.exist(err); should.not.exist(txp); err.code.should.equal('TX_NOT_FOUND') err.message.should.equal('Transaction proposal not found'); done(); }); }); it.skip('should get accepted/rejected transaction proposal', function(done) {}); it.skip('should get broadcasted transaction proposal', function(done) {}); }); describe('#getTxs', function() { var server, wallet, clock; beforeEach(function(done) { this.timeout(5000); clock = sinon.useFakeTimers('Date'); helpers.createAndJoinWallet(1, 1, function(s, w) { server = s; wallet = w; helpers.stubUtxos(server, wallet, _.range(1, 11), function() { var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 0.1, TestData.copayers[0].privKey_1H_0); async.eachSeries(_.range(10), function(i, next) { clock.tick(10 * 1000); server.createTx(txOpts, function(err, tx) { 
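// Note: this beforeEach creates ten proposals spaced 10 fake-clock seconds apart, so their createdOn values are 10, 20, ..., 100 for the pagination assertions that follow.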
should.not.exist(err); next(); }); }, function(err) { clock.restore(); return done(err); }); }); }); }); afterEach(function() { clock.restore(); }); it('should pull 5 txs, down to time 60', function(done) { server.getTxs({ minTs: 60, limit: 8 }, function(err, txps) { should.not.exist(err); var times = _.pluck(txps, 'createdOn'); times.should.deep.equal([100, 90, 80, 70, 60]); done(); }); }); it('should pull the first 5 txs', function(done) { server.getTxs({ maxTs: 50, limit: 5 }, function(err, txps) { should.not.exist(err); var times = _.pluck(txps, 'createdOn'); times.should.deep.equal([50, 40, 30, 20, 10]); done(); }); }); it('should pull the last 4 txs', function(done) { server.getTxs({ limit: 4 }, function(err, txps) { should.not.exist(err); var times = _.pluck(txps, 'createdOn'); times.should.deep.equal([100, 90, 80, 70]); done(); }); }); it('should pull all txs', function(done) { server.getTxs({}, function(err, txps) { should.not.exist(err); var times = _.pluck(txps, 'createdOn'); times.should.deep.equal([100, 90, 80, 70, 60, 50, 40, 30, 20, 10]); done(); }); }); it('should pull txs from times 50 to 70', function(done) { server.getTxs({ minTs: 50, maxTs: 70, }, function(err, txps) { should.not.exist(err); var times = _.pluck(txps, 'createdOn'); times.should.deep.equal([70, 60, 50]); done(); }); }); }); describe('#getNotifications', function() { var clock; var server, wallet; beforeEach(function(done) { clock = sinon.useFakeTimers(10 * 1000, 'Date'); helpers.createAndJoinWallet(1, 1, function(s, w) { server = s; wallet = w; helpers.stubUtxos(server, wallet, _.range(4), function() { var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 0.01, TestData.copayers[0].privKey_1H_0); async.eachSeries(_.range(3), function(i, next) { clock.tick(25 * 1000); server.createTx(txOpts, function(err, tx) { should.not.exist(err); next(); }); }, function(err) { clock.tick(20 * 1000); return done(err); }); }); }); }); afterEach(function() { clock.restore(); }); it('should pull all notifications', function(done) { server.getNotifications({}, function(err, notifications) { should.not.exist(err); var types = _.pluck(notifications, 'type'); types.should.deep.equal(['NewCopayer', 'NewAddress', 'NewAddress', 'NewTxProposal', 'NewTxProposal', 'NewTxProposal']); var walletIds = _.uniq(_.pluck(notifications, 'walletId')); walletIds.length.should.equal(1); walletIds[0].should.equal(wallet.id); var creators = _.uniq(_.compact(_.pluck(notifications, 'creatorId'))); creators.length.should.equal(1); creators[0].should.equal(wallet.copayers[0].id); done(); }); }); it('should pull new block notifications along with wallet notifications in the last 60 seconds', function(done) { // Simulate new block notification server.walletId = 'livenet'; server._notify('NewBlock', { hash: 'dummy hash', }, { isGlobal: true }, function(err) { should.not.exist(err); server.walletId = 'testnet'; server._notify('NewBlock', { hash: 'dummy hash', }, { isGlobal: true }, function(err) { should.not.exist(err); server.walletId = wallet.id; server.getNotifications({ minTs: +Date.now() - (60 * 1000), }, function(err, notifications) { should.not.exist(err); var types = _.pluck(notifications, 'type'); types.should.deep.equal(['NewTxProposal', 'NewTxProposal', 'NewBlock']); var walletIds = _.uniq(_.pluck(notifications, 'walletId')); walletIds.length.should.equal(1); walletIds[0].should.equal(wallet.id); done(); }); }); }); }); it('should pull notifications in the last 60 seconds', function(done) { server.getNotifications({
minTs: +Date.now() - (60 * 1000), }, function(err, notifications) { should.not.exist(err); var types = _.pluck(notifications, 'type'); types.should.deep.equal(['NewTxProposal', 'NewTxProposal']); done(); }); }); it('should pull notifications after a given notification id', function(done) { server.getNotifications({}, function(err, notifications) { should.not.exist(err); var from = _.first(_.takeRight(notifications, 2)).id; // second to last server.getNotifications({ notificationId: from, minTs: +Date.now() - (60 * 1000), }, function(err, res) { should.not.exist(err); res.length.should.equal(1); res[0].id.should.equal(_.first(_.takeRight(notifications)).id); done(); }); }); }); it('should return empty if no notifications found after a given id', function(done) { server.getNotifications({}, function(err, notifications) { should.not.exist(err); var from = _.first(_.takeRight(notifications)).id; // last one server.getNotifications({ notificationId: from, }, function(err, res) { should.not.exist(err); res.length.should.equal(0); done(); }); }); }); it('should return empty if no notifications exist in the given timespan', function(done) { clock.tick(100 * 1000); server.getNotifications({ minTs: +Date.now() - (60 * 1000), }, function(err, res) { should.not.exist(err); res.length.should.equal(0); done(); }); }); it('should contain walletId & creatorId on NewCopayer', function(done) { server.getNotifications({}, function(err, notifications) { should.not.exist(err); var newCopayer = notifications[0]; newCopayer.type.should.equal('NewCopayer'); newCopayer.walletId.should.equal(wallet.id); newCopayer.creatorId.should.equal(wallet.copayers[0].id); done(); }); }); it('should notify sign and acceptance', function(done) { server.getPendingTxs({}, function(err, txs) { blockchainExplorer.broadcast = sinon.stub().callsArgWith(1, 'broadcast error'); var tx = txs[0]; var signatures = helpers.clientSign(tx, TestData.copayers[0].xPrivKey); server.signTx({ txProposalId: tx.id, signatures: signatures, }, function(err) { server.getNotifications({ minTs: Date.now(), }, function(err, notifications) { should.not.exist(err); notifications.length.should.equal(2); var types = _.pluck(notifications, 'type'); types.should.deep.equal(['TxProposalAcceptedBy', 'TxProposalFinallyAccepted']); done(); }); }); }); }); it('should notify rejection', function(done) { server.getPendingTxs({}, function(err, txs) { var tx = txs[1]; server.rejectTx({ txProposalId: tx.id, }, function(err) { should.not.exist(err); server.getNotifications({ minTs: Date.now(), }, function(err, notifications) { should.not.exist(err); notifications.length.should.equal(2); var types = _.pluck(notifications, 'type'); types.should.deep.equal(['TxProposalRejectedBy', 'TxProposalFinallyRejected']); done(); }); }); }); }); it('should notify sign, acceptance, and broadcast, and emit', function(done) { server.getPendingTxs({}, function(err, txs) { var tx = txs[2]; var signatures = helpers.clientSign(tx, TestData.copayers[0].xPrivKey); server.signTx({ txProposalId: tx.id, signatures: signatures, }, function(err) { should.not.exist(err); helpers.stubBroadcast(); server.broadcastTx({ txProposalId: tx.id }, function(err, txp) { should.not.exist(err); server.getNotifications({ minTs: Date.now(), }, function(err, notifications) { should.not.exist(err); notifications.length.should.equal(3); var types = _.pluck(notifications, 'type'); types.should.deep.equal(['TxProposalAcceptedBy', 'TxProposalFinallyAccepted', 'NewOutgoingTx']); done(); }); }); }); }); }); it('should notify 
sign, acceptance, and broadcast, and emit (with 3rd party broadcast', function(done) { server.getPendingTxs({}, function(err, txs) { var tx = txs[2]; var signatures = helpers.clientSign(tx, TestData.copayers[0].xPrivKey); server.signTx({ txProposalId: tx.id, signatures: signatures, }, function(err) { should.not.exist(err); blockchainExplorer.broadcast = sinon.stub().callsArgWith(1, 'err'); blockchainExplorer.getTransaction = sinon.stub().callsArgWith(1, null, { txid: 11 }); server.broadcastTx({ txProposalId: tx.id }, function(err, txp) { should.not.exist(err); server.getNotifications({ minTs: Date.now(), }, function(err, notifications) { should.not.exist(err); notifications.length.should.equal(3); var types = _.pluck(notifications, 'type'); types.should.deep.equal(['TxProposalAcceptedBy', 'TxProposalFinallyAccepted', 'NewOutgoingTxByThirdParty']); done(); }); }); }); }); }); }); describe('#removeWallet', function() { var server, wallet, clock; beforeEach(function(done) { helpers.createAndJoinWallet(1, 1, function(s, w) { server = s; wallet = w; helpers.stubUtxos(server, wallet, _.range(2), function() { var txOpts = { toAddress: '18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', amount: helpers.toSatoshi(0.1), }; async.eachSeries(_.range(2), function(i, next) { server.createTx(txOpts, function(err, tx) { next(); }); }, done); }); }); }); it('should delete a wallet', function(done) { server.removeWallet({}, function(err) { should.not.exist(err); server.getWallet({}, function(err, w) { should.exist(err); err.code.should.equal('WALLET_NOT_FOUND'); should.not.exist(w); async.parallel([ function(next) { server.storage.fetchAddresses(wallet.id, function(err, items) { items.length.should.equal(0); next(); }); }, function(next) { server.storage.fetchTxs(wallet.id, {}, function(err, items) { items.length.should.equal(0); next(); }); }, function(next) { server.storage.fetchNotifications(wallet.id, null, 0, function(err, items) { items.length.should.equal(0); next(); }); }, ], function(err) { should.not.exist(err); done(); }); }); }); }); // creates 2 wallet, and deletes only 1. 
it('should delete a wallet, and only that wallet', function(done) { var server2, wallet2; async.series([ function(next) { helpers.createAndJoinWallet(1, 1, { offset: 1 }, function(s, w) { server2 = s; wallet2 = w; helpers.stubUtxos(server2, wallet2, _.range(1, 3), function() { var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 0.1, TestData.copayers[1].privKey_1H_0, { message: 'some message' }); async.eachSeries(_.range(2), function(i, next) { server2.createTx(txOpts, function(err, tx) { should.not.exist(err); next(err); }); }, next); }); }); }, function(next) { server.removeWallet({}, next); }, function(next) { server.getWallet({}, function(err, wallet) { should.exist(err); err.code.should.equal('WALLET_NOT_FOUND'); next(); }); }, function(next) { server2.getWallet({}, function(err, wallet) { should.not.exist(err); should.exist(wallet); wallet.id.should.equal(wallet2.id); next(); }); }, function(next) { server2.getMainAddresses({}, function(err, addresses) { should.not.exist(err); should.exist(addresses); addresses.length.should.above(0); next(); }); }, function(next) { server2.getTxs({}, function(err, txs) { should.not.exist(err); should.exist(txs); txs.length.should.equal(2); next(); }); }, function(next) { server2.getNotifications({}, function(err, notifications) { should.not.exist(err); should.exist(notifications); notifications.length.should.above(0); next(); }); }, ], function(err) { should.not.exist(err); done(); }); }); }); describe('#removePendingTx', function() { var server, wallet, txp; beforeEach(function(done) { helpers.createAndJoinWallet(2, 3, function(s, w) { server = s; wallet = w; helpers.stubUtxos(server, wallet, [100, 200], function() { var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 80, TestData.copayers[0].privKey_1H_0, { message: 'some message' }); server.createTx(txOpts, function(err, tx) { server.getPendingTxs({}, function(err, txs) { txp = txs[0]; done(); }); }); }); }); }); it('should allow creator to remove an unsigned TX', function(done) { server.removePendingTx({ txProposalId: txp.id }, function(err) { should.not.exist(err); server.getPendingTxs({}, function(err, txs) { txs.length.should.equal(0); done(); }); }); }); it('should allow creator to remove a signed TX by himself', function(done) { var signatures = helpers.clientSign(txp, TestData.copayers[0].xPrivKey); server.signTx({ txProposalId: txp.id, signatures: signatures, }, function(err) { should.not.exist(err); server.removePendingTx({ txProposalId: txp.id }, function(err) { should.not.exist(err); server.getPendingTxs({}, function(err, txs) { txs.length.should.equal(0); done(); }); }); }); }); it('should fail to remove non-pending TX', function(done) { async.waterfall([ function(next) { var signatures = helpers.clientSign(txp, TestData.copayers[0].xPrivKey); server.signTx({ txProposalId: txp.id, signatures: signatures, }, function(err) { should.not.exist(err); next(); }); }, function(next) { helpers.getAuthServer(wallet.copayers[1].id, function(server) { server.rejectTx({ txProposalId: txp.id, }, function(err) { should.not.exist(err); next(); }); }); }, function(next) { helpers.getAuthServer(wallet.copayers[2].id, function(server) { server.rejectTx({ txProposalId: txp.id, }, function(err) { should.not.exist(err); next(); }); }); }, function(next) { server.getPendingTxs({}, function(err, txs) { should.not.exist(err); txs.should.be.empty; next(); }); }, function(next) { server.removePendingTx({ txProposalId: txp.id }, function(err) { 
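// Note: by this point the proposal has one acceptance and two rejections, so it is finally rejected (no longer pending) and removal must fail.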
should.exist(err); err.code.should.equal('TX_NOT_PENDING'); done(); }); }, ]); }); it('should not allow non-creator copayer to remove an unsigned TX ', function(done) { helpers.getAuthServer(wallet.copayers[1].id, function(server2) { server2.removePendingTx({ txProposalId: txp.id }, function(err) { should.exist(err); err.code.should.contain('TX_CANNOT_REMOVE'); server2.getPendingTxs({}, function(err, txs) { txs.length.should.equal(1); done(); }); }); }); }); it('should not allow creator copayer to remove a TX signed by other copayer, in less than 24hrs', function(done) { helpers.getAuthServer(wallet.copayers[1].id, function(server2) { var signatures = helpers.clientSign(txp, TestData.copayers[1].xPrivKey); server2.signTx({ txProposalId: txp.id, signatures: signatures, }, function(err) { should.not.exist(err); server.removePendingTx({ txProposalId: txp.id }, function(err) { err.code.should.equal('TX_CANNOT_REMOVE'); err.message.should.contain('Cannot remove'); done(); }); }); }); }); it('should allow creator copayer to remove a TX rejected by other copayer, in less than 24hrs', function(done) { helpers.getAuthServer(wallet.copayers[1].id, function(server2) { var signatures = helpers.clientSign(txp, TestData.copayers[1].xPrivKey); server2.rejectTx({ txProposalId: txp.id, signatures: signatures, }, function(err) { should.not.exist(err); server.removePendingTx({ txProposalId: txp.id }, function(err) { should.not.exist(err); done(); }); }); }); }); it('should allow creator copayer to remove a TX signed by other copayer, after 24hrs', function(done) { helpers.getAuthServer(wallet.copayers[1].id, function(server2) { var signatures = helpers.clientSign(txp, TestData.copayers[1].xPrivKey); server2.signTx({ txProposalId: txp.id, signatures: signatures, }, function(err) { should.not.exist(err); server.getPendingTxs({}, function(err, txs) { should.not.exist(err); txs[0].deleteLockTime.should.be.above(WalletService.DELETE_LOCKTIME - 10); var clock = sinon.useFakeTimers(Date.now() + 1 + 24 * 3600 * 1000, 'Date'); server.removePendingTx({ txProposalId: txp.id }, function(err) { should.not.exist(err); clock.restore(); done(); }); }); }); }); }); it('should allow other copayer to remove a TX signed, after 24hrs', function(done) { helpers.getAuthServer(wallet.copayers[1].id, function(server2) { var signatures = helpers.clientSign(txp, TestData.copayers[1].xPrivKey); server2.signTx({ txProposalId: txp.id, signatures: signatures, }, function(err) { should.not.exist(err); var clock = sinon.useFakeTimers(Date.now() + 2000 + WalletService.DELETE_LOCKTIME * 1000, 'Date'); server2.removePendingTx({ txProposalId: txp.id }, function(err) { should.not.exist(err); clock.restore(); done(); }); }); }); }); }); describe('#getTxHistory', function() { var server, wallet, mainAddresses, changeAddresses; beforeEach(function(done) { helpers.createAndJoinWallet(1, 1, function(s, w) { server = s; wallet = w; helpers.createAddresses(server, wallet, 1, 1, function(main, change) { mainAddresses = main; changeAddresses = change; done(); }); }); }); it('should get tx history from insight', function(done) { helpers.stubHistory(TestData.history); server.getTxHistory({}, function(err, txs) { should.not.exist(err); should.exist(txs); txs.length.should.equal(2); done(); }); }); it('should get tx history for incoming txs', function(done) { server._normalizeTxHistory = sinon.stub().returnsArg(0); var txs = [{ txid: '1', confirmations: 1, fees: 100, time: 20, inputs: [{ address: 'external', amount: 500, }], outputs: [{ address: 
mainAddresses[0].address, amount: 200, }], }]; helpers.stubHistory(txs); server.getTxHistory({}, function(err, txs) { should.not.exist(err); should.exist(txs); txs.length.should.equal(1); var tx = txs[0]; tx.action.should.equal('received'); tx.amount.should.equal(200); tx.fees.should.equal(100); tx.time.should.equal(20); done(); }); }); it('should get tx history for outgoing txs', function(done) { server._normalizeTxHistory = sinon.stub().returnsArg(0); var txs = [{ txid: '1', confirmations: 1, fees: 100, time: 1, inputs: [{ address: mainAddresses[0].address, amount: 500, }], outputs: [{ address: 'external', amount: 400, }], }]; helpers.stubHistory(txs); server.getTxHistory({}, function(err, txs) { should.not.exist(err); should.exist(txs); txs.length.should.equal(1); var tx = txs[0]; tx.action.should.equal('sent'); tx.amount.should.equal(400); tx.fees.should.equal(100); tx.time.should.equal(1); done(); }); }); it('should get tx history for outgoing txs + change', function(done) { server._normalizeTxHistory = sinon.stub().returnsArg(0); var txs = [{ txid: '1', confirmations: 1, fees: 100, time: 1, inputs: [{ address: mainAddresses[0].address, amount: 500, }], outputs: [{ address: 'external', amount: 300, }, { address: changeAddresses[0].address, amount: 100, }], }]; helpers.stubHistory(txs); server.getTxHistory({}, function(err, txs) { should.not.exist(err); should.exist(txs); txs.length.should.equal(1); var tx = txs[0]; tx.action.should.equal('sent'); tx.amount.should.equal(300); tx.fees.should.equal(100); tx.outputs[0].address.should.equal('external'); tx.outputs[0].amount.should.equal(300); done(); }); }); it('should get tx history with accepted proposal', function(done) { server._normalizeTxHistory = sinon.stub().returnsArg(0); var external = '18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7'; helpers.stubUtxos(server, wallet, [100, 200], function(utxos) { var outputs = [{ toAddress: external, amount: 50, message: undefined // no message }, { toAddress: external, amount: 30, message: 'message #2' }]; var txOpts = helpers.createProposalOpts(Model.TxProposal.Types.MULTIPLEOUTPUTS, outputs, TestData.copayers[0].privKey_1H_0, { message: 'some message' }); server.createTx(txOpts, function(err, tx) { should.not.exist(err); should.exist(tx); var signatures = helpers.clientSign(tx, TestData.copayers[0].xPrivKey); server.signTx({ txProposalId: tx.id, signatures: signatures, }, function(err, tx) { should.not.exist(err); helpers.stubBroadcast(); server.broadcastTx({ txProposalId: tx.id }, function(err, txp) { should.not.exist(err); var txs = [{ txid: txp.txid, confirmations: 1, fees: 5460, time: 1, inputs: [{ address: tx.inputs[0].address, amount: utxos[0].satoshis, }], outputs: [{ address: changeAddresses[0].address, amount: helpers.toSatoshi(20) - 5460, }, { address: external, amount: helpers.toSatoshi(50) }, { address: external, amount: helpers.toSatoshi(30) }] }]; helpers.stubHistory(txs); server.getTxHistory({}, function(err, txs) { should.not.exist(err); should.exist(txs); txs.length.should.equal(1); var tx = txs[0]; tx.action.should.equal('sent'); tx.amount.should.equal(helpers.toSatoshi(80)); tx.message.should.equal('some message'); tx.addressTo.should.equal(external); tx.actions.length.should.equal(1); tx.actions[0].type.should.equal('accept'); tx.actions[0].copayerName.should.equal('copayer 1'); tx.proposalType.should.equal(Model.TxProposal.Types.MULTIPLEOUTPUTS); tx.outputs[0].address.should.equal(external); tx.outputs[0].amount.should.equal(helpers.toSatoshi(50)); 
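// Note: the change output is not listed in the history outputs; per-output messages are kept, and internal flags like isMine/isChange are never exposed.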
should.not.exist(tx.outputs[0].message); should.not.exist(tx.outputs[0]['isMine']); should.not.exist(tx.outputs[0]['isChange']); tx.outputs[1].address.should.equal(external); tx.outputs[1].amount.should.equal(helpers.toSatoshi(30)); should.exist(tx.outputs[1].message); tx.outputs[1].message.should.equal('message #2'); done(); }); }); }); }); }); }); it('should get various paginated tx history', function(done) { var testCases = [{ opts: {}, expected: [50, 40, 30, 20, 10], }, { opts: { skip: 1, limit: 3, }, expected: [40, 30, 20], }, { opts: { skip: 1, limit: 2, }, expected: [40, 30], }, { opts: { skip: 2, }, expected: [30, 20, 10], }, { opts: { limit: 4, }, expected: [50, 40, 30, 20], }, { opts: { skip: 0, limit: 3, }, expected: [50, 40, 30], }, { opts: { skip: 0, limit: 0, }, expected: [], }, { opts: { skip: 4, limit: 20, }, expected: [10], }, { opts: { skip: 20, limit: 1, }, expected: [], }]; server._normalizeTxHistory = sinon.stub().returnsArg(0); var timestamps = [50, 40, 30, 20, 10]; var txs = _.map(timestamps, function(ts, idx) { return { txid: (idx + 1).toString(), confirmations: ts / 10, fees: 100, time: ts, inputs: [{ address: 'external', amount: 500, }], outputs: [{ address: mainAddresses[0].address, amount: 200, }], }; }); helpers.stubHistory(txs); async.each(testCases, function(testCase, next) { server.getTxHistory(testCase.opts, function(err, txs) { should.not.exist(err); should.exist(txs); _.pluck(txs, 'time').should.deep.equal(testCase.expected); next(); }); }, done); }); it('should fail gracefully if unable to reach the blockchain', function(done) { blockchainExplorer.getTransactions = sinon.stub().callsArgWith(3, 'dummy error'); server.getTxHistory({}, function(err, txs) { should.exist(err); err.toString().should.equal('dummy error'); done(); }); }); it('should handle invalid tx in history ', function(done) { var h = _.clone(TestData.history); h.push({ txid: 'xx' }) helpers.stubHistory(h); server.getTxHistory({}, function(err, txs) { should.not.exist(err); should.exist(txs); txs.length.should.equal(3); txs[2].action.should.equal('invalid'); done(); }); }); }); describe('#scan', function() { var server, wallet; var scanConfigOld = WalletService.SCAN_CONFIG; describe('1-of-1 wallet (BIP44 & P2PKH)', function() { beforeEach(function(done) { this.timeout(5000); WalletService.SCAN_CONFIG.maxGap = 2; helpers.createAndJoinWallet(1, 1, function(s, w) { server = s; wallet = w; done(); }); }); afterEach(function() { WalletService.SCAN_CONFIG = scanConfigOld; }); it('should scan main addresses', function(done) { helpers.stubAddressActivity( ['1L3z9LPd861FWQhf3vDn89Fnc9dkdBo2CG', // m/0/0 '1GdXraZ1gtoVAvBh49D4hK9xLm6SKgesoE', // m/0/2 '1FUzgKcyPJsYwDLUEVJYeE2N3KVaoxTjGS', // m/1/0 ]); var expectedPaths = [ 'm/0/0', 'm/0/1', 'm/0/2', 'm/1/0', ]; server.scan({}, function(err) { should.not.exist(err); server.getWallet({}, function(err, wallet) { should.not.exist(err); wallet.scanStatus.should.equal('success'); server.storage.fetchAddresses(wallet.id, function(err, addresses) { should.exist(addresses); addresses.length.should.equal(expectedPaths.length); var paths = _.pluck(addresses, 'path'); _.difference(paths, expectedPaths).length.should.equal(0); server.createAddress({}, function(err, address) { should.not.exist(err); address.path.should.equal('m/0/3'); done(); }); }); }); }); }); it('should not go beyond max gap', function(done) { helpers.stubAddressActivity( ['1L3z9LPd861FWQhf3vDn89Fnc9dkdBo2CG', // m/0/0 '1GdXraZ1gtoVAvBh49D4hK9xLm6SKgesoE', // m/0/2 
'1DY9exavapgnCUWDnSTJe1BPzXcpgwAQC4', // m/0/5 '1LD7Cr68LvBPTUeXrr6YXfGrogR7TVj3WQ', // m/1/3 ]); var expectedPaths = [ 'm/0/0', 'm/0/1', 'm/0/2', ]; server.scan({}, function(err) { should.not.exist(err); server.getWallet({}, function(err, wallet) { should.not.exist(err); wallet.scanStatus.should.equal('success'); server.storage.fetchAddresses(wallet.id, function(err, addresses) { should.exist(addresses); addresses.length.should.equal(expectedPaths.length); var paths = _.pluck(addresses, 'path'); _.difference(paths, expectedPaths).length.should.equal(0); server.createAddress({}, function(err, address) { should.not.exist(err); address.path.should.equal('m/0/3'); // A rescan should see the m/0/5 address initially beyond the gap server.scan({}, function(err) { server.createAddress({}, function(err, address) { should.not.exist(err); address.path.should.equal('m/0/6'); done(); }); }); }); }); }); }); }); it('should not affect indexes on new wallet', function(done) { helpers.stubAddressActivity([]); server.scan({}, function(err) { should.not.exist(err); server.getWallet({}, function(err, wallet) { should.not.exist(err); wallet.scanStatus.should.equal('success'); server.storage.fetchAddresses(wallet.id, function(err, addresses) { should.not.exist(err); addresses.length.should.equal(0); server.createAddress({}, function(err, address) { should.not.exist(err); address.path.should.equal('m/0/0'); done(); }); }); }); }); }); it('should not rewind already generated addresses on error', function(done) { server.createAddress({}, function(err, address) { should.not.exist(err); address.path.should.equal('m/0/0'); blockchainExplorer.getAddressActivity = sinon.stub().callsArgWith(1, 'dummy error'); server.scan({}, function(err) { should.exist(err); err.toString().should.equal('dummy error'); server.getWallet({}, function(err, wallet) { should.not.exist(err); wallet.scanStatus.should.equal('error'); wallet.addressManager.receiveAddressIndex.should.equal(1); wallet.addressManager.changeAddressIndex.should.equal(0); server.createAddress({}, function(err, address) { should.not.exist(err); address.path.should.equal('m/0/1'); done(); }); }); }); }); }); it('should restore wallet balance', function(done) { async.waterfall([ function(next) { helpers.stubUtxos(server, wallet, [1, 2, 3], function(utxos) { should.exist(utxos); helpers.stubAddressActivity(_.pluck(utxos, 'address')); server.getBalance({}, function(err, balance) { balance.totalAmount.should.equal(helpers.toSatoshi(6)); next(null, server, wallet); }); }); }, function(server, wallet, next) { server.removeWallet({}, function(err) { next(err); }); }, function(next) { // NOTE: this works because it creates the exact same wallet! 
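// Re-joining with the same deterministic test keys derives the same addresses, so the scan below can rediscover the previously stubbed utxos.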
helpers.createAndJoinWallet(1, 1, function(server, wallet) { server.getBalance({}, function(err, balance) { balance.totalAmount.should.equal(0); next(null, server, wallet); }); }); }, function(server, wallet, next) { server.scan({}, function(err) { should.not.exist(err); server.getBalance(wallet.id, function(err, balance) { balance.totalAmount.should.equal(helpers.toSatoshi(6)); next(); }) }); }, ], function(err) { should.not.exist(err); done(); }); }); it('should abort scan if there is an error checking address activity', function(done) { blockchainExplorer.getAddressActivity = sinon.stub().callsArgWith(1, 'dummy error'); server.scan({}, function(err) { should.exist(err); err.toString().should.equal('dummy error'); server.getWallet({}, function(err, wallet) { should.not.exist(err); wallet.scanStatus.should.equal('error'); wallet.addressManager.receiveAddressIndex.should.equal(0); wallet.addressManager.changeAddressIndex.should.equal(0); server.storage.fetchAddresses(wallet.id, function(err, addresses) { should.not.exist(err); addresses.should.be.empty; done(); }); }); }); }); }); describe('shared wallet (BIP45)', function() { beforeEach(function(done) { this.timeout(5000); WalletService.SCAN_CONFIG.maxGap = 2; helpers.createAndJoinWallet(1, 2, { supportBIP44AndP2PKH: false }, function(s, w) { server = s; wallet = w; done(); }); }); afterEach(function() { WalletService.SCAN_CONFIG = scanConfigOld; }); it('should scan main addresses', function(done) { helpers.stubAddressActivity( ['39AA1Y2VvPJhV3RFbc7cKbUax1WgkPwweR', // m/2147483647/0/0 '3QX2MNSijnhCALBmUVnDo5UGPj3SEGASWx', // m/2147483647/0/2 '3MzGaz4KKX66w8ShKaR536ZqzVvREBqqYu', // m/2147483647/1/0 ]); var expectedPaths = [ 'm/2147483647/0/0', 'm/2147483647/0/1', 'm/2147483647/0/2', 'm/2147483647/1/0', ]; server.scan({}, function(err) { should.not.exist(err); server.getWallet({}, function(err, wallet) { should.not.exist(err); wallet.scanStatus.should.equal('success'); server.storage.fetchAddresses(wallet.id, function(err, addresses) { should.exist(addresses); addresses.length.should.equal(expectedPaths.length); var paths = _.pluck(addresses, 'path'); _.difference(paths, expectedPaths).length.should.equal(0); server.createAddress({}, function(err, address) { should.not.exist(err); address.path.should.equal('m/2147483647/0/3'); done(); }); }); }); }); }); it('should scan main addresses & copayer addresses', function(done) { helpers.stubAddressActivity( ['39AA1Y2VvPJhV3RFbc7cKbUax1WgkPwweR', // m/2147483647/0/0 '3MzGaz4KKX66w8ShKaR536ZqzVvREBqqYu', // m/2147483647/1/0 '3BYoynejwBH9q4Jhr9m9P5YTnLTu57US6g', // m/0/0/1 '37Pb8c32hzm16tCZaVHj4Dtjva45L2a3A3', // m/1/1/0 '32TB2n283YsXdseMqUm9zHSRcfS5JxTWxx', // m/1/0/0 ]); var expectedPaths = [ 'm/2147483647/0/0', 'm/2147483647/1/0', 'm/0/0/0', 'm/0/0/1', 'm/1/0/0', 'm/1/1/0', ]; server.scan({ includeCopayerBranches: true }, function(err) { should.not.exist(err); server.storage.fetchAddresses(wallet.id, function(err, addresses) { should.exist(addresses); addresses.length.should.equal(expectedPaths.length); var paths = _.pluck(addresses, 'path'); _.difference(paths, expectedPaths).length.should.equal(0); done(); }) }); }); }); }); describe('#startScan', function() { var server, wallet; var scanConfigOld = WalletService.SCAN_CONFIG; beforeEach(function(done) { this.timeout(5000); WalletService.SCAN_CONFIG.maxGap = 2; helpers.createAndJoinWallet(1, 1, { supportBIP44AndP2PKH: false }, function(s, w) { server = s; wallet = w; done(); }); }); afterEach(function() { WalletService.SCAN_CONFIG = 
scanConfigOld; server.messageBroker.removeAllListeners(); }); it('should start an asynchronous scan', function(done) { helpers.stubAddressActivity( ['3GvvHimEMk2GBZnPxTF89GHZL6QhZjUZVs', // m/2147483647/0/0 '37pd1jjTUiGBh8JL2hKLDgsyrhBoiz5vsi', // m/2147483647/0/2 '3C3tBn8Sr1wHTp2brMgYsj9ncB7R7paYuB', // m/2147483647/1/0 ]); var expectedPaths = [ 'm/2147483647/0/0', 'm/2147483647/0/1', 'm/2147483647/0/2', 'm/2147483647/1/0', ]; server.messageBroker.onMessage(function(n) { if (n.type == 'ScanFinished') { server.getWallet({}, function(err, wallet) { should.exist(wallet.scanStatus); wallet.scanStatus.should.equal('success'); should.not.exist(n.creatorId); server.storage.fetchAddresses(wallet.id, function(err, addresses) { should.exist(addresses); addresses.length.should.equal(expectedPaths.length); var paths = _.pluck(addresses, 'path'); _.difference(paths, expectedPaths).length.should.equal(0); server.createAddress({}, function(err, address) { should.not.exist(err); address.path.should.equal('m/2147483647/0/3'); done(); }); }) }); } }); server.startScan({}, function(err) { should.not.exist(err); }); }); it('should set scan status error when unable to reach blockchain', function(done) { blockchainExplorer.getAddressActivity = sinon.stub().yields('dummy error'); server.messageBroker.onMessage(function(n) { if (n.type == 'ScanFinished') { should.exist(n.data.error); server.getWallet({}, function(err, wallet) { should.exist(wallet.scanStatus); wallet.scanStatus.should.equal('error'); done(); }); } }); server.startScan({}, function(err) { should.not.exist(err); }); }); it('should start multiple asynchronous scans for different wallets', function(done) { helpers.stubAddressActivity(['3K2VWMXheGZ4qG35DyGjA2dLeKfaSr534A']); WalletService.SCAN_CONFIG.scanWindow = 1; var scans = 0; server.messageBroker.onMessage(function(n) { if (n.type == 'ScanFinished') { scans++; if (scans == 2) done(); } }); // Create a second wallet var server2 = new WalletService(); var opts = { name: 'second wallet', m: 1, n: 1, pubKey: TestData.keyPair.pub, }; server2.createWallet(opts, function(err, walletId) { should.not.exist(err); var copayerOpts = helpers.getSignedCopayerOpts({ walletId: walletId, name: 'copayer 1', xPubKey: TestData.copayers[3].xPubKey_45H, requestPubKey: TestData.copayers[3].pubKey_1H_0, }); server.joinWallet(copayerOpts, function(err, result) { should.not.exist(err); helpers.getAuthServer(result.copayerId, function(server2) { server.startScan({}, function(err) { should.not.exist(err); scans.should.equal(0); }); server2.startScan({}, function(err) { should.not.exist(err); scans.should.equal(0); }); scans.should.equal(0); }); }); }); }); }); describe('Legacy', function() { describe('Fees', function() { var server, wallet; beforeEach(function(done) { helpers.createAndJoinWallet(2, 3, function(s, w) { server = s; wallet = w; done(); }); }); it('should create a tx from legacy (bwc-0.0.*) client', function(done) { helpers.stubUtxos(server, wallet, [100, 200], function() { var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 80, TestData.copayers[0].privKey_1H_0, { message: 'some message' }); var verifyStub = sinon.stub(WalletService.prototype, '_verifySignature'); verifyStub.returns(true); WalletService.getInstanceWithAuth({ copayerId: wallet.copayers[0].id, message: 'dummy', signature: 'dummy', clientVersion: 'bwc-0.0.40', }, function(err, server) { should.not.exist(err); should.exist(server); verifyStub.restore(); server.createTx(txOpts, function(err, tx) { 
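// Note: proposals created through a legacy (bwc-0.0.*) client are expected to fall back to the default fee per KB.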
should.not.exist(err); should.exist(tx); tx.amount.should.equal(helpers.toSatoshi(80)); tx.fee.should.equal(WalletUtils.DEFAULT_FEE_PER_KB); done(); }); }); }); }); it('should not return error when fetching new txps from legacy (bwc-0.0.*) client', function(done) { helpers.stubUtxos(server, wallet, [100, 200], function() { var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 80, TestData.copayers[0].privKey_1H_0, { message: 'some message' }); server.createTx(txOpts, function(err, tx) { should.not.exist(err); should.exist(tx); var verifyStub = sinon.stub(WalletService.prototype, '_verifySignature'); verifyStub.returns(true); WalletService.getInstanceWithAuth({ copayerId: wallet.copayers[0].id, message: 'dummy', signature: 'dummy', clientVersion: 'bwc-0.0.40', }, function(err, server) { verifyStub.restore(); should.not.exist(err); should.exist(server); server.getPendingTxs({}, function(err, txps) { should.not.exist(err); should.exist(txps); done(); }); }); }); }); }); it('should fail to sign tx from legacy (bwc-0.0.*) client', function(done) { helpers.stubUtxos(server, wallet, [100, 200], function() { var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 80, TestData.copayers[0].privKey_1H_0, { message: 'some message' }); server.createTx(txOpts, function(err, tx) { should.not.exist(err); should.exist(tx); _.startsWith(tx.version, '1.').should.be.false; var verifyStub = sinon.stub(WalletService.prototype, '_verifySignature'); verifyStub.returns(true); WalletService.getInstanceWithAuth({ copayerId: wallet.copayers[0].id, message: 'dummy', signature: 'dummy', clientVersion: 'bwc-0.0.40', }, function(err, server) { var signatures = helpers.clientSign(tx, TestData.copayers[0].xPrivKey); server.signTx({ txProposalId: tx.id, signatures: signatures, }, function(err) { verifyStub.restore(); should.exist(err); err.code.should.equal('UPGRADE_NEEDED'); err.message.should.contain('sign this spend proposal'); done(); }); }); }); }); }); it('should create a tx from legacy (bwc-0.0.*) client and sign it from newer client', function(done) { helpers.stubUtxos(server, wallet, [100, 200], function() { var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 80, TestData.copayers[0].privKey_1H_0, { message: 'some message' }); var verifyStub = sinon.stub(WalletService.prototype, '_verifySignature'); verifyStub.returns(true); WalletService.getInstanceWithAuth({ copayerId: wallet.copayers[0].id, message: 'dummy', signature: 'dummy', clientVersion: 'bwc-0.0.40', }, function(err, server) { should.not.exist(err); should.exist(server); verifyStub.restore(); server.createTx(txOpts, function(err, tx) { should.not.exist(err); should.exist(tx); tx.amount.should.equal(helpers.toSatoshi(80)); tx.fee.should.equal(WalletUtils.DEFAULT_FEE_PER_KB); helpers.getAuthServer(wallet.copayers[0].id, function(server) { var signatures = helpers.clientSign(tx, TestData.copayers[0].xPrivKey); server.signTx({ txProposalId: tx.id, signatures: signatures, }, function(err) { should.not.exist(err); done(); }); }); }); }); }); }); it('should fail with insufficient fee when invoked from legacy (bwc-0.0.*) client', function(done) { helpers.stubUtxos(server, wallet, 1, function() { var verifyStub = sinon.stub(WalletService.prototype, '_verifySignature'); verifyStub.returns(true); WalletService.getInstanceWithAuth({ copayerId: wallet.copayers[0].id, message: 'dummy', signature: 'dummy', clientVersion: 'bwc-0.0.40', }, function(err, server) { 
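// Note: spending 0.99995 of a single 1 BTC utxo leaves only 5000 satoshis for the fee, so the first attempt fails and the retry with feePerKb: 5000 is expected to fit exactly.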
should.not.exist(err); should.exist(server); verifyStub.restore(); var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 0.99995, TestData.copayers[0].privKey_1H_0); server.createTx(txOpts, function(err, tx) { should.exist(err); err.code.should.equal('INSUFFICIENT_FUNDS_FOR_FEE'); var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 0.99995, TestData.copayers[0].privKey_1H_0, { feePerKb: 5000 }); server.createTx(txOpts, function(err, tx) { should.not.exist(err); tx.fee.should.equal(5000); // Sign it to make sure Bitcore doesn't complain about the fees var signatures = helpers.clientSign(tx, TestData.copayers[0].xPrivKey); server.signTx({ txProposalId: tx.id, signatures: signatures, }, function(err) { should.not.exist(err); done(); }); }); }); }); }); }); }); }); });
test/integration/server.js
'use strict'; var _ = require('lodash'); var async = require('async'); var inspect = require('util').inspect; var chai = require('chai'); var sinon = require('sinon'); var should = chai.should(); var log = require('npmlog'); log.debug = log.verbose; var fs = require('fs'); var tingodb = require('tingodb')({ memStore: true }); var Utils = require('../../lib/utils'); var WalletUtils = require('bitcore-wallet-utils'); var Bitcore = WalletUtils.Bitcore; var Storage = require('../../lib/storage'); var Model = require('../../lib/model'); var WalletService = require('../../lib/server'); var EmailService = require('../../lib/emailservice'); var TestData = require('../testdata'); var CLIENT_VERSION = 'bwc-0.1.1'; var helpers = {}; helpers.getAuthServer = function(copayerId, cb) { var verifyStub = sinon.stub(WalletService.prototype, '_verifySignature'); verifyStub.returns(true); WalletService.getInstanceWithAuth({ copayerId: copayerId, message: 'dummy', signature: 'dummy', clientVersion: 'bwc-0.1.0', }, function(err, server) { verifyStub.restore(); if (err || !server) throw new Error('Could not login as copayerId ' + copayerId); return cb(server); }); }; helpers._generateCopayersTestData = function(n) { console.log('var copayers = ['); _.each(_.range(n), function(c) { var xpriv = new Bitcore.HDPrivateKey(); var xpub = Bitcore.HDPublicKey(xpriv); var xpriv_45H = xpriv.derive(45, true); var xpub_45H = Bitcore.HDPublicKey(xpriv_45H); var id45 = WalletUtils.xPubToCopayerId(xpub_45H.toString()); var xpriv_44H_0H_0H = xpriv.derive(44, true).derive(0, true).derive(0, true); var xpub_44H_0H_0H = Bitcore.HDPublicKey(xpriv_44H_0H_0H); var id44 = WalletUtils.xPubToCopayerId(xpub_44H_0H_0H.toString()); var xpriv_1H = xpriv.derive(1, true); var xpub_1H = Bitcore.HDPublicKey(xpriv_1H); var priv = xpriv_1H.derive(0).privateKey; var pub = xpub_1H.derive(0).publicKey; console.log('{id44: ', "'" + id44 + "',"); console.log('id45: ', "'" + id45 + "',"); console.log('xPrivKey: ', "'" + xpriv.toString() + "',"); console.log('xPubKey: ', "'" + xpub.toString() + "',"); console.log('xPrivKey_45H: ', "'" + xpriv_45H.toString() + "',"); console.log('xPubKey_45H: ', "'" + xpub_45H.toString() + "',"); console.log('xPrivKey_44H_0H_0H: ', "'" + xpriv_44H_0H_0H.toString() + "',"); console.log('xPubKey_44H_0H_0H: ', "'" + xpub_44H_0H_0H.toString() + "',"); console.log('xPrivKey_1H: ', "'" + xpriv_1H.toString() + "',"); console.log('xPubKey_1H: ', "'" + xpub_1H.toString() + "',"); console.log('privKey_1H_0: ', "'" + priv.toString() + "',"); console.log('pubKey_1H_0: ', "'" + pub.toString() + "'},"); }); console.log('];'); }; helpers.getSignedCopayerOpts = function(opts) { var hash = WalletUtils.getCopayerHash(opts.name, opts.xPubKey, opts.requestPubKey); opts.copayerSignature = WalletUtils.signMessage(hash, TestData.keyPair.priv); return opts; }; helpers.createAndJoinWallet = function(m, n, opts, cb) { if (_.isFunction(opts)) { cb = opts; opts = {}; } opts = opts || {}; var server = new WalletService(); var copayerIds = []; var offset = opts.offset || 0; var walletOpts = { name: 'a wallet', m: m, n: n, pubKey: TestData.keyPair.pub, }; if (_.isBoolean(opts.supportBIP44AndP2PKH)) walletOpts.supportBIP44AndP2PKH = opts.supportBIP44AndP2PKH; server.createWallet(walletOpts, function(err, walletId) { if (err) return cb(err); async.each(_.range(n), function(i, cb) { var copayerData = TestData.copayers[i + offset]; var copayerOpts = helpers.getSignedCopayerOpts({ walletId: walletId, name: 'copayer ' + (i + 1), xPubKey: 
(_.isBoolean(opts.supportBIP44AndP2PKH) && !opts.supportBIP44AndP2PKH) ? copayerData.xPubKey_45H : copayerData.xPubKey_44H_0H_0H, requestPubKey: copayerData.pubKey_1H_0, customData: 'custom data ' + (i + 1), }); if (_.isBoolean(opts.supportBIP44AndP2PKH)) copayerOpts.supportBIP44AndP2PKH = opts.supportBIP44AndP2PKH; server.joinWallet(copayerOpts, function(err, result) { should.not.exist(err); copayerIds.push(result.copayerId); return cb(err); }); }, function(err) { if (err) return new Error('Could not generate wallet'); helpers.getAuthServer(copayerIds[0], function(s) { s.getWallet({}, function(err, w) { cb(s, w); }); }); }); }); }; helpers.randomTXID = function() { return Bitcore.crypto.Hash.sha256(new Buffer(Math.random() * 100000)).toString('hex');; }; helpers.toSatoshi = function(btc) { if (_.isArray(btc)) { return _.map(btc, helpers.toSatoshi); } else { return Utils.strip(btc * 1e8); } }; helpers.stubUtxos = function(server, wallet, amounts, cb) { async.mapSeries(_.range(0, amounts.length > 2 ? 2 : 1), function(i, next) { server.createAddress({}, next); }, function(err, addresses) { should.not.exist(err); addresses.should.not.be.empty; var utxos = _.map([].concat(amounts), function(amount, i) { var address = addresses[i % addresses.length]; var confirmations; if (_.isString(amount) && _.startsWith(amount, 'u')) { amount = parseFloat(amount.substring(1)); confirmations = 0; } else { confirmations = Math.floor(Math.random() * 100 + 1); } var scriptPubKey; switch (wallet.addressType) { case WalletUtils.SCRIPT_TYPES.P2SH: scriptPubKey = Bitcore.Script.buildMultisigOut(address.publicKeys, wallet.m).toScriptHashOut(); break; case WalletUtils.SCRIPT_TYPES.P2PKH: scriptPubKey = Bitcore.Script.buildPublicKeyHashOut(address.address); break; } should.exist(scriptPubKey); return { txid: helpers.randomTXID(), vout: Math.floor(Math.random() * 10 + 1), satoshis: helpers.toSatoshi(amount).toString(), scriptPubKey: scriptPubKey.toBuffer().toString('hex'), address: address.address, confirmations: confirmations, }; }); blockchainExplorer.getUnspentUtxos = function(addresses, cb) { var selected = _.filter(utxos, function(utxo) { return _.contains(addresses, utxo.address); }); return cb(null, selected); }; return cb(utxos); }); }; helpers.stubBroadcast = function(thirdPartyBroadcast) { blockchainExplorer.broadcast = sinon.stub().callsArgWith(1, null, '112233'); blockchainExplorer.getTransaction = sinon.stub().callsArgWith(1, null, null); }; helpers.stubHistory = function(txs) { blockchainExplorer.getTransactions = function(addresses, from, to, cb) { var MAX_BATCH_SIZE = 100; var nbTxs = txs.length; if (_.isUndefined(from) && _.isUndefined(to)) { from = 0; to = MAX_BATCH_SIZE; } if (!_.isUndefined(from) && _.isUndefined(to)) to = from + MAX_BATCH_SIZE; if (!_.isUndefined(from) && !_.isUndefined(to) && to - from > MAX_BATCH_SIZE) to = from + MAX_BATCH_SIZE; if (from < 0) from = 0; if (to < 0) to = 0; if (from > nbTxs) from = nbTxs; if (to > nbTxs) to = nbTxs; var page = txs.slice(from, to); return cb(null, page); }; }; helpers.stubFeeLevels = function(levels) { blockchainExplorer.estimateFee = function(nbBlocks, cb) { var result = _.zipObject(_.map(_.pick(levels, nbBlocks), function(fee, n) { return [+n, fee > 0 ? 
fee / 1e8 : fee]; })); return cb(null, result); }; }; helpers.stubAddressActivity = function(activeAddresses) { blockchainExplorer.getAddressActivity = function(address, cb) { return cb(null, _.contains(activeAddresses, address)); }; }; helpers.clientSign = WalletUtils.signTxp; helpers.createProposalOptsLegacy = function(toAddress, amount, message, signingKey, feePerKb) { var opts = { toAddress: toAddress, amount: helpers.toSatoshi(amount), message: message, proposalSignature: null, }; if (feePerKb) opts.feePerKb = feePerKb; var hash = WalletUtils.getProposalHash(toAddress, opts.amount, message); try { opts.proposalSignature = WalletUtils.signMessage(hash, signingKey); } catch (ex) {} return opts; }; helpers.createSimpleProposalOpts = function(toAddress, amount, signingKey, opts) { var outputs = [{ toAddress: toAddress, amount: amount, }]; return helpers.createProposalOpts(Model.TxProposal.Types.SIMPLE, outputs, signingKey, opts); }; helpers.createProposalOpts = function(type, outputs, signingKey, moreOpts) { _.each(outputs, function(output) { output.amount = helpers.toSatoshi(output.amount); }); var opts = { type: type, proposalSignature: null }; if (moreOpts) { moreOpts = _.chain(moreOpts) .pick(['feePerKb', 'customData', 'message']) .value(); opts = _.assign(opts, moreOpts); } opts = _.defaults(opts, { message: null }); var hash; if (type == Model.TxProposal.Types.SIMPLE) { opts.toAddress = outputs[0].toAddress; opts.amount = outputs[0].amount; hash = WalletUtils.getProposalHash(opts.toAddress, opts.amount, opts.message, opts.payProUrl); } else if (type == Model.TxProposal.Types.MULTIPLEOUTPUTS) { opts.outputs = outputs; var header = { outputs: outputs, message: opts.message, payProUrl: opts.payProUrl }; hash = WalletUtils.getProposalHash(header); } try { opts.proposalSignature = WalletUtils.signMessage(hash, signingKey); } catch (ex) {} return opts; }; helpers.createAddresses = function(server, wallet, main, change, cb) { async.map(_.range(main + change), function(i, next) { var address = wallet.createAddress(i >= main); server.storage.storeAddressAndWallet(wallet, address, function(err) { if (err) return next(err); next(null, address); }); }, function(err, addresses) { if (err) throw new Error('Could not generate addresses'); return cb(_.take(addresses, main), _.takeRight(addresses, change)); }); }; var storage, blockchainExplorer; var useMongoDb = !!process.env.USE_MONGO_DB; function initStorage(cb) { function getDb(cb) { if (useMongoDb) { var mongodb = require('mongodb'); mongodb.MongoClient.connect('mongodb://localhost:27017/bws_test', function(err, db) { if (err) throw err; return cb(db); }); } else { var db = new tingodb.Db('./db/test', {}); return cb(db); } } getDb(function(db) { storage = new Storage({ db: db }); return cb(); }); }; function resetStorage(cb) { if (!storage.db) return cb(); storage.db.dropDatabase(function(err) { return cb(); }); }; describe('Wallet service', function() { before(function(done) { initStorage(done); }); beforeEach(function(done) { resetStorage(function() { blockchainExplorer = sinon.stub(); WalletService.initialize({ storage: storage, blockchainExplorer: blockchainExplorer, }, done); }); }); after(function(done) { WalletService.shutDown(done); }); describe('Email notifications', function() { var server, wallet, mailerStub, emailService; describe('Shared wallet', function() { beforeEach(function(done) { helpers.createAndJoinWallet(2, 3, function(s, w) { server = s; wallet = w; var i = 0; async.eachSeries(w.copayers, function(copayer, next) { 
helpers.getAuthServer(copayer.id, function(server) { server.savePreferences({ email: 'copayer' + (++i) + '@domain.com', unit: 'bit', }, next); }); }, function(err) { should.not.exist(err); mailerStub = sinon.stub(); mailerStub.sendMail = sinon.stub(); mailerStub.sendMail.yields(); emailService = new EmailService(); emailService.start({ lockOpts: {}, messageBroker: server.messageBroker, storage: storage, mailer: mailerStub, emailOpts: { from: '[email protected]', subjectPrefix: '[test wallet]', publicTxUrlTemplate: { livenet: 'https://insight.bitpay.com/tx/{{txid}}', testnet: 'https://test-insight.bitpay.com/tx/{{txid}}', }, }, }, function(err) { should.not.exist(err); done(); }); }); }); }); it('should notify copayers a new tx proposal has been created', function(done) { var _readTemplateFile_old = emailService._readTemplateFile; emailService._readTemplateFile = function(language, filename, cb) { if (_.endsWith(filename, '.html')) { return cb(null, '<html><body>{{walletName}}</body></html>'); } else { _readTemplateFile_old.call(emailService, language, filename, cb); } }; helpers.stubUtxos(server, wallet, [1, 1], function() { var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 0.8, TestData.copayers[0].privKey_1H_0, { message: 'some message' }); server.createTx(txOpts, function(err, tx) { should.not.exist(err); setTimeout(function() { var calls = mailerStub.sendMail.getCalls(); calls.length.should.equal(2); var emails = _.map(calls, function(c) { return c.args[0]; }); _.difference(['[email protected]', '[email protected]'], _.pluck(emails, 'to')).should.be.empty; var one = emails[0]; one.from.should.equal('[email protected]'); one.subject.should.contain('New payment proposal'); one.text.should.contain(wallet.name); one.text.should.contain(wallet.copayers[0].name); should.exist(one.html); one.html.indexOf('<html>').should.equal(0); one.html.should.contain(wallet.name); server.storage.fetchUnsentEmails(function(err, unsent) { should.not.exist(err); unsent.should.be.empty; emailService._readTemplateFile = _readTemplateFile_old; done(); }); }, 100); }); }); }); it('should not send email if unable to apply template to notification', function(done) { var _applyTemplate_old = emailService._applyTemplate; emailService._applyTemplate = function(template, data, cb) { _applyTemplate_old.call(emailService, template, undefined, cb); }; helpers.stubUtxos(server, wallet, [1, 1], function() { var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 0.8, TestData.copayers[0].privKey_1H_0, { message: 'some message' }); server.createTx(txOpts, function(err, tx) { should.not.exist(err); setTimeout(function() { var calls = mailerStub.sendMail.getCalls(); calls.length.should.equal(0); server.storage.fetchUnsentEmails(function(err, unsent) { should.not.exist(err); unsent.should.be.empty; emailService._applyTemplate = _applyTemplate_old; done(); }); }, 100); }); }); }); it('should notify copayers a new outgoing tx has been created', function(done) { var _readTemplateFile_old = emailService._readTemplateFile; emailService._readTemplateFile = function(language, filename, cb) { if (_.endsWith(filename, '.html')) { return cb(null, '<html>{{&urlForTx}}<html>'); } else { _readTemplateFile_old.call(emailService, language, filename, cb); } }; helpers.stubUtxos(server, wallet, [1, 1], function() { var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 0.8, TestData.copayers[0].privKey_1H_0, { message: 'some message' }); var txp; 
async.waterfall([ function(next) { server.createTx(txOpts, next); }, function(t, next) { txp = t; async.eachSeries(_.range(2), function(i, next) { var copayer = TestData.copayers[i]; helpers.getAuthServer(copayer.id44, function(server) { var signatures = helpers.clientSign(txp, copayer.xPrivKey); server.signTx({ txProposalId: txp.id, signatures: signatures, }, function(err, t) { txp = t; next(); }); }); }, next); }, function(next) { helpers.stubBroadcast(); server.broadcastTx({ txProposalId: txp.id, }, next); }, ], function(err) { should.not.exist(err); setTimeout(function() { var calls = mailerStub.sendMail.getCalls(); var emails = _.map(_.takeRight(calls, 3), function(c) { return c.args[0]; }); _.difference(['[email protected]', '[email protected]', '[email protected]'], _.pluck(emails, 'to')).should.be.empty; var one = emails[0]; one.from.should.equal('[email protected]'); one.subject.should.contain('Payment sent'); one.text.should.contain(wallet.name); one.text.should.contain('800,000'); should.exist(one.html); one.html.should.contain('https://insight.bitpay.com/tx/' + txp.txid); server.storage.fetchUnsentEmails(function(err, unsent) { should.not.exist(err); unsent.should.be.empty; emailService._readTemplateFile = _readTemplateFile_old; done(); }); }, 100); }); }); }); it('should notify copayers a tx has been finally rejected', function(done) { helpers.stubUtxos(server, wallet, 1, function() { var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 0.8, TestData.copayers[0].privKey_1H_0, { message: 'some message' }); var txpId; async.waterfall([ function(next) { server.createTx(txOpts, next); }, function(txp, next) { txpId = txp.id; async.eachSeries(_.range(1, 3), function(i, next) { var copayer = TestData.copayers[i]; helpers.getAuthServer(copayer.id44, function(server) { server.rejectTx({ txProposalId: txp.id, }, next); }); }, next); }, ], function(err) { should.not.exist(err); setTimeout(function() { var calls = mailerStub.sendMail.getCalls(); var emails = _.map(_.takeRight(calls, 2), function(c) { return c.args[0]; }); _.difference(['[email protected]', '[email protected]'], _.pluck(emails, 'to')).should.be.empty; var one = emails[0]; one.from.should.equal('[email protected]'); one.subject.should.contain('Payment proposal rejected'); one.text.should.contain(wallet.name); one.text.should.contain('copayer 2, copayer 3'); one.text.should.not.contain('copayer 1'); server.storage.fetchUnsentEmails(function(err, unsent) { should.not.exist(err); unsent.should.be.empty; done(); }); }, 100); }); }); }); it('should notify copayers of incoming txs', function(done) { server.createAddress({}, function(err, address) { should.not.exist(err); // Simulate incoming tx notification server._notify('NewIncomingTx', { txid: '999', address: address, amount: 12300000, }, function(err) { setTimeout(function() { var calls = mailerStub.sendMail.getCalls(); calls.length.should.equal(3); var emails = _.map(calls, function(c) { return c.args[0]; }); _.difference(['[email protected]', '[email protected]', '[email protected]'], _.pluck(emails, 'to')).should.be.empty; var one = emails[0]; one.from.should.equal('[email protected]'); one.subject.should.contain('New payment received'); one.text.should.contain(wallet.name); one.text.should.contain('123,000'); server.storage.fetchUnsentEmails(function(err, unsent) { should.not.exist(err); unsent.should.be.empty; done(); }); }, 100); }); }); }); it('should notify each email address only once', function(done) { // Set same email address for 
copayer1 and copayer2 server.savePreferences({ email: '[email protected]', }, function(err) { server.createAddress({}, function(err, address) { should.not.exist(err); // Simulate incoming tx notification server._notify('NewIncomingTx', { txid: '999', address: address, amount: 12300000, }, function(err) { setTimeout(function() { var calls = mailerStub.sendMail.getCalls(); calls.length.should.equal(2); var emails = _.map(calls, function(c) { return c.args[0]; }); _.difference(['[email protected]', '[email protected]'], _.pluck(emails, 'to')).should.be.empty; var one = emails[0]; one.from.should.equal('[email protected]'); one.subject.should.contain('New payment received'); one.text.should.contain(wallet.name); one.text.should.contain('123,000'); server.storage.fetchUnsentEmails(function(err, unsent) { should.not.exist(err); unsent.should.be.empty; done(); }); }, 100); }); }); }); }); it('should build each email using preferences of the copayers', function(done) { // Set same email address for copayer1 and copayer2 server.savePreferences({ email: '[email protected]', language: 'es', unit: 'btc', }, function(err) { server.createAddress({}, function(err, address) { should.not.exist(err); // Simulate incoming tx notification server._notify('NewIncomingTx', { txid: '999', address: address, amount: 12300000, }, function(err) { setTimeout(function() { var calls = mailerStub.sendMail.getCalls(); calls.length.should.equal(3); var emails = _.map(calls, function(c) { return c.args[0]; }); var spanish = _.find(emails, { to: '[email protected]' }); spanish.from.should.equal('[email protected]'); spanish.subject.should.contain('Nuevo pago recibido'); spanish.text.should.contain(wallet.name); spanish.text.should.contain('0.123 BTC'); var english = _.find(emails, { to: '[email protected]' }); english.from.should.equal('[email protected]'); english.subject.should.contain('New payment received'); english.text.should.contain(wallet.name); english.text.should.contain('123,000 bits'); done(); }, 100); }); }); }); }); it('should support multiple emailservice instances running concurrently', function(done) { var emailService2 = new EmailService(); emailService2.start({ lock: emailService.lock, // Use same locker service messageBroker: server.messageBroker, storage: storage, mailer: mailerStub, emailOpts: { from: '[email protected]', subjectPrefix: '[test wallet 2]', }, }, function(err) { helpers.stubUtxos(server, wallet, 1, function() { var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 0.8, TestData.copayers[0].privKey_1H_0, { message: 'some message' }); server.createTx(txOpts, function(err, tx) { should.not.exist(err); setTimeout(function() { var calls = mailerStub.sendMail.getCalls(); calls.length.should.equal(2); server.storage.fetchUnsentEmails(function(err, unsent) { should.not.exist(err); unsent.should.be.empty; done(); }); }, 100); }); }); }); }); }); describe('1-of-N wallet', function() { beforeEach(function(done) { helpers.createAndJoinWallet(1, 2, function(s, w) { server = s; wallet = w; var i = 0; async.eachSeries(w.copayers, function(copayer, next) { helpers.getAuthServer(copayer.id, function(server) { server.savePreferences({ email: 'copayer' + (++i) + '@domain.com', unit: 'bit', }, next); }); }, function(err) { should.not.exist(err); mailerStub = sinon.stub(); mailerStub.sendMail = sinon.stub(); mailerStub.sendMail.yields(); emailService = new EmailService(); emailService.start({ lockOpts: {}, messageBroker: server.messageBroker, storage: storage, mailer: mailerStub, 
emailOpts: { from: '[email protected]', subjectPrefix: '[test wallet]', publicTxUrlTemplate: { livenet: 'https://insight.bitpay.com/tx/{{txid}}', testnet: 'https://test-insight.bitpay.com/tx/{{txid}}', }, }, }, function(err) { should.not.exist(err); done(); }); }); }); }); it('should NOT notify copayers a new tx proposal has been created', function(done) { helpers.stubUtxos(server, wallet, [1, 1], function() { var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 0.8, TestData.copayers[0].privKey_1H_0, { message: 'some message' }); server.createTx(txOpts, function(err, tx) { should.not.exist(err); setTimeout(function() { var calls = mailerStub.sendMail.getCalls(); calls.length.should.equal(0); done(); }, 100); }); }); }); }); }); describe('#getServiceVersion', function() { it('should get version from package', function() { WalletService.getServiceVersion().should.equal('bws-' + require('../../package').version); }); }); describe('#getInstance', function() { it('should get server instance', function() { var server = WalletService.getInstance({ clientVersion: 'bwc-0.0.1', }); server.clientVersion.should.equal('bwc-0.0.1'); }); }); describe('#getInstanceWithAuth', function() { it('should get server instance for existing copayer', function(done) { helpers.createAndJoinWallet(1, 2, function(s, wallet) { var xpriv = TestData.copayers[0].xPrivKey; var priv = TestData.copayers[0].privKey_1H_0; var sig = WalletUtils.signMessage('hello world', priv); WalletService.getInstanceWithAuth({ copayerId: wallet.copayers[0].id, message: 'hello world', signature: sig, clientVersion: 'bwc-0.0.1', }, function(err, server) { should.not.exist(err); server.walletId.should.equal(wallet.id); server.copayerId.should.equal(wallet.copayers[0].id); server.clientVersion.should.equal('bwc-0.0.1'); done(); }); }); }); it('should fail when requesting for non-existent copayer', function(done) { var message = 'hello world'; var opts = { copayerId: 'dummy', message: message, signature: WalletUtils.signMessage(message, TestData.copayers[0].privKey_1H_0), }; WalletService.getInstanceWithAuth(opts, function(err, server) { err.code.should.equal('NOT_AUTHORIZED'); err.message.should.contain('Copayer not found'); done(); }); }); it('should fail when message signature cannot be verified', function(done) { helpers.createAndJoinWallet(1, 2, function(s, wallet) { WalletService.getInstanceWithAuth({ copayerId: wallet.copayers[0].id, message: 'dummy', signature: 'dummy', }, function(err, server) { err.code.should.equal('NOT_AUTHORIZED'); err.message.should.contain('Invalid signature'); done(); }); }); }); }); describe('#createWallet', function() { var server; beforeEach(function() { server = new WalletService(); }); it('should create and store wallet', function(done) { var opts = { name: 'my wallet', m: 2, n: 3, pubKey: TestData.keyPair.pub, }; server.createWallet(opts, function(err, walletId) { should.not.exist(err); server.storage.fetchWallet(walletId, function(err, wallet) { should.not.exist(err); wallet.id.should.equal(walletId); wallet.name.should.equal('my wallet'); done(); }); }); }); it('should create wallet with given id', function(done) { var opts = { name: 'my wallet', m: 2, n: 3, pubKey: TestData.keyPair.pub, id: '1234', }; server.createWallet(opts, function(err, walletId) { should.not.exist(err); server.storage.fetchWallet('1234', function(err, wallet) { should.not.exist(err); wallet.id.should.equal(walletId); wallet.name.should.equal('my wallet'); done(); }); }); }); it('should fail to create 
wallets with same id', function(done) { var opts = { name: 'my wallet', m: 2, n: 3, pubKey: TestData.keyPair.pub, id: '1234', }; server.createWallet(opts, function(err, walletId) { server.createWallet(opts, function(err, walletId) { err.message.should.contain('Wallet already exists'); done(); }); }); }); it('should fail to create wallet with no name', function(done) { var opts = { name: '', m: 2, n: 3, pubKey: TestData.keyPair.pub, }; server.createWallet(opts, function(err, walletId) { should.not.exist(walletId); should.exist(err); err.message.should.contain('name'); done(); }); }); it('should fail to create wallet with invalid copayer pairs', function(done) { var invalidPairs = [{ m: 0, n: 0 }, { m: 0, n: 2 }, { m: 2, n: 1 }, { m: 0, n: 10 }, { m: 1, n: 20 }, { m: 10, n: 10 }, ]; var opts = { id: '123', name: 'my wallet', pubKey: TestData.keyPair.pub, }; async.each(invalidPairs, function(pair, cb) { opts.m = pair.m; opts.n = pair.n; server.createWallet(opts, function(err) { should.exist(err); err.message.should.equal('Invalid combination of required copayers / total copayers'); return cb(); }); }, function(err) { done(); }); }); it('should fail to create wallet with invalid pubKey argument', function(done) { var opts = { name: 'my wallet', m: 2, n: 3, pubKey: 'dummy', }; server.createWallet(opts, function(err, walletId) { should.not.exist(walletId); should.exist(err); err.message.should.contain('Invalid public key'); done(); }); }); }); describe('#joinWallet', function() { var server, walletId; beforeEach(function(done) { server = new WalletService(); var walletOpts = { name: 'my wallet', m: 1, n: 2, pubKey: TestData.keyPair.pub, }; server.createWallet(walletOpts, function(err, wId) { should.not.exist(err); walletId = wId; should.exist(walletId); done(); }); }); it('should join existing wallet', function(done) { var copayerOpts = helpers.getSignedCopayerOpts({ walletId: walletId, name: 'me', xPubKey: TestData.copayers[0].xPubKey_44H_0H_0H, requestPubKey: TestData.copayers[0].pubKey_1H_0, customData: 'dummy custom data', }); server.joinWallet(copayerOpts, function(err, result) { should.not.exist(err); var copayerId = result.copayerId; helpers.getAuthServer(copayerId, function(server) { server.getWallet({}, function(err, wallet) { wallet.id.should.equal(walletId); wallet.copayers.length.should.equal(1); var copayer = wallet.copayers[0]; copayer.name.should.equal('me'); copayer.id.should.equal(copayerId); copayer.customData.should.equal('dummy custom data'); server.getNotifications({}, function(err, notifications) { should.not.exist(err); var notif = _.find(notifications, { type: 'NewCopayer' }); should.exist(notif); notif.data.walletId.should.equal(walletId); notif.data.copayerId.should.equal(copayerId); notif.data.copayerName.should.equal('me'); notif = _.find(notifications, { type: 'WalletComplete' }); should.not.exist(notif); done(); }); }); }); }); }); it('should fail to join with no name', function(done) { var copayerOpts = helpers.getSignedCopayerOpts({ walletId: walletId, name: '', xPubKey: TestData.copayers[0].xPubKey_44H_0H_0H, requestPubKey: TestData.copayers[0].pubKey_1H_0, }); server.joinWallet(copayerOpts, function(err, result) { should.not.exist(result); should.exist(err); err.message.should.contain('name'); done(); }); }); it('should fail to join non-existent wallet', function(done) { var copayerOpts = { walletId: '123', name: 'me', xPubKey: 'dummy', requestPubKey: 'dummy', copayerSignature: 'dummy', }; server.joinWallet(copayerOpts, function(err) { should.exist(err); done(); 
}); }); it('should fail to join full wallet', function(done) { helpers.createAndJoinWallet(1, 1, function(s, wallet) { var copayerOpts = helpers.getSignedCopayerOpts({ walletId: wallet.id, name: 'me', xPubKey: TestData.copayers[1].xPubKey_44H_0H_0H, requestPubKey: TestData.copayers[1].pubKey_1H_0, }); server.joinWallet(copayerOpts, function(err) { should.exist(err); err.code.should.equal('WALLET_FULL'); err.message.should.equal('Wallet full'); done(); }); }); }); it('should return copayer in wallet error before full wallet', function(done) { helpers.createAndJoinWallet(1, 1, function(s, wallet) { var copayerOpts = helpers.getSignedCopayerOpts({ walletId: wallet.id, name: 'me', xPubKey: TestData.copayers[0].xPubKey_44H_0H_0H, requestPubKey: TestData.copayers[0].pubKey_1H_0, }); server.joinWallet(copayerOpts, function(err) { should.exist(err); err.code.should.equal('COPAYER_IN_WALLET'); done(); }); }); }); it('should fail to re-join wallet', function(done) { var copayerOpts = helpers.getSignedCopayerOpts({ walletId: walletId, name: 'me', xPubKey: TestData.copayers[0].xPubKey_44H_0H_0H, requestPubKey: TestData.copayers[0].pubKey_1H_0, }); server.joinWallet(copayerOpts, function(err) { should.not.exist(err); server.joinWallet(copayerOpts, function(err) { should.exist(err); err.code.should.equal('COPAYER_IN_WALLET'); err.message.should.equal('Copayer already in wallet'); done(); }); }); }); it('should be able to get wallet info without actually joining', function(done) { var copayerOpts = helpers.getSignedCopayerOpts({ walletId: walletId, name: 'me', xPubKey: TestData.copayers[0].xPubKey_44H_0H_0H, requestPubKey: TestData.copayers[0].pubKey_1H_0, customData: 'dummy custom data', dryRun: true, }); server.joinWallet(copayerOpts, function(err, result) { should.not.exist(err); should.exist(result); should.not.exist(result.copayerId); result.wallet.id.should.equal(walletId); result.wallet.m.should.equal(1); result.wallet.n.should.equal(2); result.wallet.copayers.should.be.empty; server.storage.fetchWallet(walletId, function(err, wallet) { should.not.exist(err); wallet.id.should.equal(walletId); wallet.copayers.should.be.empty; done(); }); }); }); it('should fail to join two wallets with same xPubKey', function(done) { var copayerOpts = helpers.getSignedCopayerOpts({ walletId: walletId, name: 'me', xPubKey: TestData.copayers[0].xPubKey_44H_0H_0H, requestPubKey: TestData.copayers[0].pubKey_1H_0, }); server.joinWallet(copayerOpts, function(err) { should.not.exist(err); var walletOpts = { name: 'my other wallet', m: 1, n: 1, pubKey: TestData.keyPair.pub, }; server.createWallet(walletOpts, function(err, walletId) { should.not.exist(err); copayerOpts = helpers.getSignedCopayerOpts({ walletId: walletId, name: 'me', xPubKey: TestData.copayers[0].xPubKey_44H_0H_0H, requestPubKey: TestData.copayers[0].pubKey_1H_0, }); server.joinWallet(copayerOpts, function(err) { should.exist(err); err.code.should.equal('COPAYER_REGISTERED'); err.message.should.equal('Copayer ID already registered on server'); done(); }); }); }); }); it('should fail to join with bad formated signature', function(done) { var copayerOpts = { walletId: walletId, name: 'me', xPubKey: TestData.copayers[0].xPubKey_44H_0H_0H, requestPubKey: TestData.copayers[0].pubKey_1H_0, copayerSignature: 'bad sign', }; server.joinWallet(copayerOpts, function(err) { err.message.should.equal('Bad request'); done(); }); }); it('should fail to join with null signature', function(done) { var copayerOpts = { walletId: walletId, name: 'me', xPubKey: 
TestData.copayers[0].xPubKey_44H_0H_0H, requestPubKey: TestData.copayers[0].pubKey_1H_0, }; server.joinWallet(copayerOpts, function(err) { should.exist(err); err.message.should.contain('argument missing'); done(); }); }); it('should fail to join with wrong signature', function(done) { var copayerOpts = helpers.getSignedCopayerOpts({ walletId: walletId, name: 'me', xPubKey: TestData.copayers[0].xPubKey_44H_0H_0H, requestPubKey: TestData.copayers[0].pubKey_1H_0, }); copayerOpts.name = 'me2'; server.joinWallet(copayerOpts, function(err) { err.message.should.equal('Bad request'); done(); }); }); it('should set pkr and status = complete on last copayer joining (2-3)', function(done) { helpers.createAndJoinWallet(2, 3, function(server) { server.getWallet({}, function(err, wallet) { should.not.exist(err); wallet.status.should.equal('complete'); wallet.publicKeyRing.length.should.equal(3); server.getNotifications({}, function(err, notifications) { should.not.exist(err); var notif = _.find(notifications, { type: 'WalletComplete' }); should.exist(notif); notif.data.walletId.should.equal(wallet.id); done(); }); }); }); }); it('should not notify WalletComplete if 1-of-1', function(done) { helpers.createAndJoinWallet(1, 1, function(server) { server.getNotifications({}, function(err, notifications) { should.not.exist(err); var notif = _.find(notifications, { type: 'WalletComplete' }); should.not.exist(notif); done(); }); }); }); }); describe('#joinWallet new/legacy clients', function() { var server; beforeEach(function() { server = new WalletService(); }); it('should fail to join legacy wallet from new client', function(done) { var walletOpts = { name: 'my wallet', m: 1, n: 2, pubKey: TestData.keyPair.pub, supportBIP44AndP2PKH: false, }; server.createWallet(walletOpts, function(err, walletId) { should.not.exist(err); should.exist(walletId); var copayerOpts = helpers.getSignedCopayerOpts({ walletId: walletId, name: 'me', xPubKey: TestData.copayers[0].xPubKey_44H_0H_0H, requestPubKey: TestData.copayers[0].pubKey_1H_0, }); server.joinWallet(copayerOpts, function(err, result) { should.exist(err); err.message.should.contain('The wallet you are trying to join was created with an older version of the client app'); done(); }); }); }); it('should fail to join new wallet from legacy client', function(done) { var walletOpts = { name: 'my wallet', m: 1, n: 2, pubKey: TestData.keyPair.pub, }; server.createWallet(walletOpts, function(err, walletId) { should.not.exist(err); should.exist(walletId); var copayerOpts = helpers.getSignedCopayerOpts({ walletId: walletId, name: 'me', xPubKey: TestData.copayers[0].xPubKey_45H, requestPubKey: TestData.copayers[0].pubKey_1H_0, supportBIP44AndP2PKH: false, }); server.joinWallet(copayerOpts, function(err, result) { should.exist(err); err.code.should.equal('UPGRADE_NEEDED'); done(); }); }); }); }); describe('Address derivation strategy', function() { var server; beforeEach(function() { server = WalletService.getInstance(); }); it('should use BIP44 & P2PKH for 1-of-1 wallet if supported', function(done) { var walletOpts = { name: 'my wallet', m: 1, n: 1, pubKey: TestData.keyPair.pub, }; server.createWallet(walletOpts, function(err, wid) { should.not.exist(err); server.storage.fetchWallet(wid, function(err, wallet) { should.not.exist(err); wallet.derivationStrategy.should.equal('BIP44'); wallet.addressType.should.equal('P2PKH'); done(); }); }); }); it('should use BIP45 & P2SH for 1-of-1 wallet if not supported', function(done) { var walletOpts = { name: 'my wallet', m: 1, n: 1, 
pubKey: TestData.keyPair.pub, supportBIP44AndP2PKH: false, }; server.createWallet(walletOpts, function(err, wid) { should.not.exist(err); server.storage.fetchWallet(wid, function(err, wallet) { should.not.exist(err); wallet.derivationStrategy.should.equal('BIP45'); wallet.addressType.should.equal('P2SH'); done(); }); }); }); it('should use BIP44 & P2SH for shared wallet if supported', function(done) { var walletOpts = { name: 'my wallet', m: 2, n: 3, pubKey: TestData.keyPair.pub, }; server.createWallet(walletOpts, function(err, wid) { should.not.exist(err); server.storage.fetchWallet(wid, function(err, wallet) { should.not.exist(err); wallet.derivationStrategy.should.equal('BIP44'); wallet.addressType.should.equal('P2SH'); done(); }); }); }); it('should use BIP45 & P2SH for shared wallet if supported', function(done) { var walletOpts = { name: 'my wallet', m: 2, n: 3, pubKey: TestData.keyPair.pub, supportBIP44AndP2PKH: false, }; server.createWallet(walletOpts, function(err, wid) { should.not.exist(err); server.storage.fetchWallet(wid, function(err, wallet) { should.not.exist(err); wallet.derivationStrategy.should.equal('BIP45'); wallet.addressType.should.equal('P2SH'); done(); }); }); }); }); describe('#getStatus', function() { var server, wallet; beforeEach(function(done) { helpers.createAndJoinWallet(1, 2, function(s, w) { server = s; wallet = w; done(); }); }); it('should get status', function(done) { server.getStatus({}, function(err, status) { should.not.exist(err); should.exist(status); should.exist(status.wallet); status.wallet.name.should.equal(wallet.name); should.exist(status.wallet.copayers); status.wallet.copayers.length.should.equal(2); should.exist(status.balance); status.balance.totalAmount.should.equal(0); should.exist(status.preferences); should.exist(status.pendingTxps); status.pendingTxps.should.be.empty; should.not.exist(status.wallet.publicKeyRing); should.not.exist(status.wallet.pubKey); should.not.exist(status.wallet.addressManager); _.each(status.wallet.copayers, function(copayer) { should.not.exist(copayer.xPubKey); should.not.exist(copayer.requestPubKey); should.not.exist(copayer.signature); should.not.exist(copayer.requestPubKey); should.not.exist(copayer.addressManager); should.not.exist(copayer.customData); }); done(); }); }); it('should get status including extended info', function(done) { server.getStatus({ includeExtendedInfo: true }, function(err, status) { should.not.exist(err); should.exist(status); should.exist(status.wallet.publicKeyRing); should.exist(status.wallet.pubKey); should.exist(status.wallet.addressManager); should.exist(status.wallet.copayers[0].xPubKey); should.exist(status.wallet.copayers[0].requestPubKey); should.exist(status.wallet.copayers[0].signature); should.exist(status.wallet.copayers[0].requestPubKey); should.exist(status.wallet.copayers[0].customData); // Do not return other copayer's custom data _.each(_.rest(status.wallet.copayers), function(copayer) { should.not.exist(copayer.customData); }); done(); }); }); it('should get status after tx creation', function(done) { helpers.stubUtxos(server, wallet, [100, 200], function() { var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 80, TestData.copayers[0].privKey_1H_0, { message: 'some message' }); server.createTx(txOpts, function(err, tx) { should.not.exist(err); should.exist(tx); server.getStatus({}, function(err, status) { should.not.exist(err); status.pendingTxps.length.should.equal(1); var balance = status.balance; 
balance.totalAmount.should.equal(helpers.toSatoshi(300)); balance.lockedAmount.should.equal(tx.inputs[0].satoshis); balance.availableAmount.should.equal(balance.totalAmount - balance.lockedAmount); done(); }); }); }); }); }); describe('#verifyMessageSignature', function() { var server, wallet; beforeEach(function(done) { helpers.createAndJoinWallet(2, 3, function(s, w) { server = s; wallet = w; done(); }); }); it('should successfully verify message signature', function(done) { var message = 'hello world'; var opts = { message: message, signature: WalletUtils.signMessage(message, TestData.copayers[0].privKey_1H_0), }; server.verifyMessageSignature(opts, function(err, isValid) { should.not.exist(err); isValid.should.be.true; done(); }); }); it('should fail to verify message signature for different copayer', function(done) { var message = 'hello world'; var opts = { message: message, signature: WalletUtils.signMessage(message, TestData.copayers[0].privKey_1H_0), }; helpers.getAuthServer(wallet.copayers[1].id, function(server) { server.verifyMessageSignature(opts, function(err, isValid) { should.not.exist(err); isValid.should.be.false; done(); }); }); }); }); describe('#createAddress', function() { var server, wallet; describe('shared wallets (BIP45)', function() { beforeEach(function(done) { helpers.createAndJoinWallet(2, 2, { supportBIP44AndP2PKH: false }, function(s, w) { server = s; wallet = w; done(); }); }); it('should create address', function(done) { server.createAddress({}, function(err, address) { should.not.exist(err); should.exist(address); address.walletId.should.equal(wallet.id); address.network.should.equal('livenet'); address.address.should.equal('3BVJZ4CYzeTtawDtgwHvWV5jbvnXtYe97i'); address.isChange.should.be.false; address.path.should.equal('m/2147483647/0/0'); address.type.should.equal('P2SH'); server.getNotifications({}, function(err, notifications) { should.not.exist(err); var notif = _.find(notifications, { type: 'NewAddress' }); should.exist(notif); notif.data.address.should.equal(address.address); done(); }); }); }); it('should protect against storing same address multiple times', function(done) { server.createAddress({}, function(err, address) { should.not.exist(err); should.exist(address); delete address._id; server.storage.storeAddressAndWallet(wallet, address, function(err) { should.not.exist(err); server.getMainAddresses({}, function(err, addresses) { should.not.exist(err); addresses.length.should.equal(1); done(); }); }); }); }); it('should create many addresses on simultaneous requests', function(done) { var N = 5; async.map(_.range(N), function(i, cb) { server.createAddress({}, cb); }, function(err, addresses) { addresses.length.should.equal(N); _.each(_.range(N), function(i) { addresses[i].path.should.equal('m/2147483647/0/' + i); }); // No two identical addresses _.uniq(_.pluck(addresses, 'address')).length.should.equal(N); done(); }); }); }); describe('shared wallets (BIP44)', function() { beforeEach(function(done) { helpers.createAndJoinWallet(2, 2, function(s, w) { server = s; wallet = w; done(); }); }); it('should create address', function(done) { server.createAddress({}, function(err, address) { should.not.exist(err); should.exist(address); address.walletId.should.equal(wallet.id); address.network.should.equal('livenet'); address.address.should.equal('36q2G5FMGvJbPgAVEaiyAsFGmpkhPKwk2r'); address.isChange.should.be.false; address.path.should.equal('m/0/0'); address.type.should.equal('P2SH'); server.getNotifications({}, function(err, notifications) { 
should.not.exist(err); var notif = _.find(notifications, { type: 'NewAddress' }); should.exist(notif); notif.data.address.should.equal(address.address); done(); }); }); }); it('should create many addresses on simultaneous requests', function(done) { var N = 5; async.map(_.range(N), function(i, cb) { server.createAddress({}, cb); }, function(err, addresses) { addresses.length.should.equal(N); _.each(_.range(N), function(i) { addresses[i].path.should.equal('m/0/' + i); }); // No two identical addresses _.uniq(_.pluck(addresses, 'address')).length.should.equal(N); done(); }); }); it('should not create address if unable to store it', function(done) { sinon.stub(server.storage, 'storeAddressAndWallet').yields('dummy error'); server.createAddress({}, function(err, address) { should.exist(err); should.not.exist(address); server.getMainAddresses({}, function(err, addresses) { addresses.length.should.equal(0); server.storage.storeAddressAndWallet.restore(); server.createAddress({}, function(err, address) { should.not.exist(err); should.exist(address); done(); }); }); }); }); }); describe('1-of-1 (BIP44 & P2PKH)', function() { beforeEach(function(done) { helpers.createAndJoinWallet(1, 1, function(s, w) { server = s; wallet = w; w.copayers[0].id.should.equal(TestData.copayers[0].id44); done(); }); }); it('should create address', function(done) { server.createAddress({}, function(err, address) { should.not.exist(err); should.exist(address); address.walletId.should.equal(wallet.id); address.network.should.equal('livenet'); address.address.should.equal('1L3z9LPd861FWQhf3vDn89Fnc9dkdBo2CG'); address.isChange.should.be.false; address.path.should.equal('m/0/0'); address.type.should.equal('P2PKH'); server.getNotifications({}, function(err, notifications) { should.not.exist(err); var notif = _.find(notifications, { type: 'NewAddress' }); should.exist(notif); notif.data.address.should.equal(address.address); done(); }); }); }); it('should create many addresses on simultaneous requests', function(done) { var N = 5; async.map(_.range(N), function(i, cb) { server.createAddress({}, cb); }, function(err, addresses) { addresses.length.should.equal(N); _.each(_.range(N), function(i) { addresses[i].path.should.equal('m/0/' + i); }); // No two identical addresses _.uniq(_.pluck(addresses, 'address')).length.should.equal(N); done(); }); }); }); }); describe('Preferences', function() { var server, wallet; beforeEach(function(done) { helpers.createAndJoinWallet(2, 2, function(s, w) { server = s; wallet = w; done(); }); }); it('should save & retrieve preferences', function(done) { server.savePreferences({ email: '[email protected]', language: 'es', unit: 'bit', dummy: 'ignored', }, function(err) { should.not.exist(err); server.getPreferences({}, function(err, preferences) { should.not.exist(err); should.exist(preferences); preferences.email.should.equal('[email protected]'); preferences.language.should.equal('es'); preferences.unit.should.equal('bit'); should.not.exist(preferences.dummy); done(); }); }); }); it('should save preferences only for requesting copayer', function(done) { server.savePreferences({ email: '[email protected]' }, function(err) { should.not.exist(err); helpers.getAuthServer(wallet.copayers[1].id, function(server2) { server2.getPreferences({}, function(err, preferences) { should.not.exist(err); should.not.exist(preferences.email); done(); }); }); }); }); it('should save preferences incrementally', function(done) { async.series([ function(next) { server.savePreferences({ email: '[email protected]', }, 
next); }, function(next) { server.getPreferences({}, function(err, preferences) { should.not.exist(err); should.exist(preferences); preferences.email.should.equal('[email protected]'); should.not.exist(preferences.language); next(); }); }, function(next) { server.savePreferences({ language: 'es', }, next); }, function(next) { server.getPreferences({}, function(err, preferences) { should.not.exist(err); should.exist(preferences); preferences.language.should.equal('es'); preferences.email.should.equal('[email protected]'); next(); }); }, function(next) { server.savePreferences({ language: null, unit: 'bit', }, next); }, function(next) { server.getPreferences({}, function(err, preferences) { should.not.exist(err); should.exist(preferences); preferences.unit.should.equal('bit'); should.not.exist(preferences.language); preferences.email.should.equal('[email protected]'); next(); }); }, ], function(err) { should.not.exist(err); done(); }); }); it.skip('should save preferences only for requesting wallet', function(done) {}); it('should validate entries', function(done) { var invalid = [{ preferences: { email: ' ', }, expected: 'email' }, { preferences: { email: 'dummy@' + _.repeat('domain', 50), }, expected: 'email' }, { preferences: { language: 'xxxxx', }, expected: 'language' }, { preferences: { language: 123, }, expected: 'language' }, { preferences: { unit: 'xxxxx', }, expected: 'unit' }, ]; async.each(invalid, function(item, next) { server.savePreferences(item.preferences, function(err) { should.exist(err); err.message.should.contain(item.expected); next(); }); }, done); }); }); describe('#getUtxos', function() { var server, wallet; beforeEach(function(done) { helpers.createAndJoinWallet(1, 1, function(s, w) { server = s; wallet = w; done(); }); }); it('should get UTXOs for wallet addresses', function(done) { helpers.stubUtxos(server, wallet, [1, 2], function() { server.getUtxos({}, function(err, utxos) { should.not.exist(err); should.exist(utxos); utxos.length.should.equal(2); _.sum(utxos, 'satoshis').should.equal(3 * 1e8); server.getMainAddresses({}, function(err, addresses) { var utxo = utxos[0]; var address = _.find(addresses, { address: utxo.address }); should.exist(address); utxo.path.should.equal(address.path); utxo.publicKeys.should.deep.equal(address.publicKeys); done(); }); }); }); }); it('should get UTXOs for specific addresses', function(done) { helpers.stubUtxos(server, wallet, [1, 2, 3], function(utxos) { _.uniq(utxos, 'address').length.should.be.above(1); var address = utxos[0].address; var amount = _.sum(_.filter(utxos, { address: address }), 'satoshis'); server.getUtxos({ addresses: [address] }, function(err, utxos) { should.not.exist(err); should.exist(utxos); _.sum(utxos, 'satoshis').should.equal(amount); done(); }); }); }); }); describe('Multiple request Pub Keys', function() { var server, wallet; var opts, reqPrivKey, ws; var getAuthServer = function(copayerId, privKey, cb) { var msg = 'dummy'; var sig = WalletUtils.signMessage(msg, privKey); WalletService.getInstanceWithAuth({ copayerId: copayerId, message: msg, signature: sig, clientVersion: CLIENT_VERSION, }, function(err, server) { return cb(err, server); }); }; beforeEach(function() { reqPrivKey = new Bitcore.PrivateKey(); var requestPubKey = reqPrivKey.toPublicKey(); var xPrivKey = TestData.copayers[0].xPrivKey_44H_0H_0H; var sig = WalletUtils.signRequestPubKey(requestPubKey, xPrivKey); var copayerId = WalletUtils.xPubToCopayerId(TestData.copayers[0].xPubKey_44H_0H_0H); opts = { copayerId: copayerId, requestPubKey: 
requestPubKey, signature: sig, }; ws = new WalletService(); }); describe('#addAccess 1-1', function() { beforeEach(function(done) { helpers.createAndJoinWallet(1, 1, function(s, w) { server = s; wallet = w; helpers.stubUtxos(server, wallet, 1, function() { done(); }); }); }); it('should be able to re-gain access from xPrivKey', function(done) { ws.addAccess(opts, function(err, res) { should.not.exist(err); res.wallet.copayers[0].requestPubKeys.length.should.equal(2); res.wallet.copayers[0].requestPubKeys[0].selfSigned.should.equal(true); server.getBalance(res.wallet.walletId, function(err, bal) { should.not.exist(err); bal.totalAmount.should.equal(1e8); getAuthServer(opts.copayerId, reqPrivKey, function(err, server2) { server2.getBalance(res.wallet.walletId, function(err, bal2) { should.not.exist(err); bal2.totalAmount.should.equal(1e8); done(); }); }); }); }); }); it('should fail to gain access with wrong xPrivKey', function(done) { opts.signature = 'xx'; ws.addAccess(opts, function(err, res) { err.code.should.equal('NOT_AUTHORIZED'); done(); }); }); it('should fail to access with wrong privkey after gaining access', function(done) { ws.addAccess(opts, function(err, res) { should.not.exist(err); server.getBalance(res.wallet.walletId, function(err, bal) { should.not.exist(err); var privKey = new Bitcore.PrivateKey(); (getAuthServer(opts.copayerId, privKey, function(err, server2) { err.code.should.equal('NOT_AUTHORIZED'); done(); })); }); }); }); it('should be able to create TXs after regaining access', function(done) { ws.addAccess(opts, function(err, res) { should.not.exist(err); getAuthServer(opts.copayerId, reqPrivKey, function(err, server2) { var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 0.8, reqPrivKey); server2.createTx(txOpts, function(err, tx) { should.not.exist(err); done(); }); }); }); }); }); describe('#addAccess 2-2', function() { beforeEach(function(done) { helpers.createAndJoinWallet(2, 2, function(s, w) { server = s; wallet = w; helpers.stubUtxos(server, wallet, 1, function() { done(); }); }); }); it('should be able to re-gain access from xPrivKey', function(done) { ws.addAccess(opts, function(err, res) { should.not.exist(err); server.getBalance(res.wallet.walletId, function(err, bal) { should.not.exist(err); bal.totalAmount.should.equal(1e8); getAuthServer(opts.copayerId, reqPrivKey, function(err, server2) { server2.getBalance(res.wallet.walletId, function(err, bal2) { should.not.exist(err); bal2.totalAmount.should.equal(1e8); done(); }); }); }); }); }); it('TX proposals should include info to be verified', function(done) { ws.addAccess(opts, function(err, res) { should.not.exist(err); getAuthServer(opts.copayerId, reqPrivKey, function(err, server2) { var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 0.8, reqPrivKey); server2.createTx(txOpts, function(err, tx) { should.not.exist(err); server2.getPendingTxs({}, function(err, txs) { should.not.exist(err); should.exist(txs[0].proposalSignaturePubKey); should.exist(txs[0].proposalSignaturePubKeySig); done(); }); }); }); }); }); }); }); describe('#getBalance', function() { var server, wallet; beforeEach(function(done) { helpers.createAndJoinWallet(1, 1, function(s, w) { server = s; wallet = w; done(); }); }); it('should get balance', function(done) { helpers.stubUtxos(server, wallet, [1, 'u2', 3], function() { server.getBalance({}, function(err, balance) { should.not.exist(err); should.exist(balance); balance.totalAmount.should.equal(helpers.toSatoshi(6)); 
balance.lockedAmount.should.equal(0); balance.availableAmount.should.equal(helpers.toSatoshi(6)); balance.totalBytesToSendMax.should.equal(578); balance.totalConfirmedAmount.should.equal(helpers.toSatoshi(4)); balance.lockedConfirmedAmount.should.equal(0); balance.availableConfirmedAmount.should.equal(helpers.toSatoshi(4)); should.exist(balance.byAddress); balance.byAddress.length.should.equal(2); balance.byAddress[0].amount.should.equal(helpers.toSatoshi(4)); balance.byAddress[1].amount.should.equal(helpers.toSatoshi(2)); server.getMainAddresses({}, function(err, addresses) { should.not.exist(err); var addresses = _.uniq(_.pluck(addresses, 'address')); _.intersection(addresses, _.pluck(balance.byAddress, 'address')).length.should.equal(2); done(); }); }); }); }); it('should get balance when there are no addresses', function(done) { server.getBalance({}, function(err, balance) { should.not.exist(err); should.exist(balance); balance.totalAmount.should.equal(0); balance.lockedAmount.should.equal(0); balance.availableAmount.should.equal(0); balance.totalBytesToSendMax.should.equal(0); should.exist(balance.byAddress); balance.byAddress.length.should.equal(0); done(); }); }); it('should get balance when there are no funds', function(done) { blockchainExplorer.getUnspentUtxos = sinon.stub().callsArgWith(1, null, []); server.createAddress({}, function(err, address) { should.not.exist(err); server.getBalance({}, function(err, balance) { should.not.exist(err); should.exist(balance); balance.totalAmount.should.equal(0); balance.lockedAmount.should.equal(0); balance.availableAmount.should.equal(0); balance.totalBytesToSendMax.should.equal(0); should.exist(balance.byAddress); balance.byAddress.length.should.equal(0); done(); }); }); }); it('should only include addresses with balance', function(done) { helpers.stubUtxos(server, wallet, 1, function(utxos) { server.createAddress({}, function(err, address) { should.not.exist(err); server.getBalance({}, function(err, balance) { should.not.exist(err); balance.byAddress.length.should.equal(1); balance.byAddress[0].amount.should.equal(helpers.toSatoshi(1)); balance.byAddress[0].address.should.equal(utxos[0].address); done(); }); }); }); }); it('should return correct kb to send max', function(done) { helpers.stubUtxos(server, wallet, _.range(1, 10, 0), function() { server.getBalance({}, function(err, balance) { should.not.exist(err); should.exist(balance); balance.totalAmount.should.equal(helpers.toSatoshi(9)); balance.lockedAmount.should.equal(0); balance.totalBytesToSendMax.should.equal(1535); done(); }); }); }); it('should fail gracefully when blockchain is unreachable', function(done) { blockchainExplorer.getUnspentUtxos = sinon.stub().callsArgWith(1, 'dummy error'); server.createAddress({}, function(err, address) { should.not.exist(err); server.getBalance({}, function(err, balance) { should.exist(err); err.toString().should.equal('dummy error'); done(); }); }); }); }); describe('#getFeeLevels', function() { var server, wallet; beforeEach(function(done) { helpers.createAndJoinWallet(1, 1, function(s, w) { server = s; wallet = w; done(); }); }); it('should get current fee levels', function(done) { helpers.stubFeeLevels({ 1: 40000, 2: 20000, 6: 18000, }); server.getFeeLevels({}, function(err, fees) { should.not.exist(err); fees = _.zipObject(_.map(fees, function(item) { return [item.level, item]; })); fees.priority.feePerKb.should.equal(40000); fees.priority.nbBlocks.should.equal(1); fees.normal.feePerKb.should.equal(20000); 
fees.normal.nbBlocks.should.equal(2); fees.economy.feePerKb.should.equal(18000); fees.economy.nbBlocks.should.equal(6); done(); }); }); it('should get default fees if network cannot be accessed', function(done) { blockchainExplorer.estimateFee = sinon.stub().yields('dummy error'); server.getFeeLevels({}, function(err, fees) { should.not.exist(err); fees = _.zipObject(_.map(fees, function(item) { return [item.level, item.feePerKb]; })); fees.priority.should.equal(50000); fees.normal.should.equal(20000); fees.economy.should.equal(10000); done(); }); }); it('should get default fees if network cannot estimate (returns -1)', function(done) { helpers.stubFeeLevels({ 1: -1, 2: 18000, 6: 0, }); server.getFeeLevels({}, function(err, fees) { should.not.exist(err); fees = _.zipObject(_.map(fees, function(item) { return [item.level, item]; })); fees.priority.feePerKb.should.equal(50000); should.not.exist(fees.priority.nbBlocks); fees.normal.feePerKb.should.equal(18000); fees.normal.nbBlocks.should.equal(2); fees.economy.feePerKb.should.equal(0); fees.economy.nbBlocks.should.equal(6); done(); }); }); }); describe('Wallet not complete tests', function() { it('should fail to create address when wallet is not complete', function(done) { var server = new WalletService(); var walletOpts = { name: 'my wallet', m: 2, n: 3, pubKey: TestData.keyPair.pub, }; server.createWallet(walletOpts, function(err, walletId) { should.not.exist(err); var copayerOpts = helpers.getSignedCopayerOpts({ walletId: walletId, name: 'me', xPubKey: TestData.copayers[0].xPubKey_45H, requestPubKey: TestData.copayers[0].pubKey_1H_0, }); server.joinWallet(copayerOpts, function(err, result) { should.not.exist(err); helpers.getAuthServer(result.copayerId, function(server) { server.createAddress({}, function(err, address) { should.not.exist(address); should.exist(err); err.code.should.equal('WALLET_NOT_COMPLETE'); err.message.should.equal('Wallet is not complete'); done(); }); }); }); }); }); it('should fail to create tx when wallet is not complete', function(done) { var server = new WalletService(); var walletOpts = { name: 'my wallet', m: 2, n: 3, pubKey: TestData.keyPair.pub, }; server.createWallet(walletOpts, function(err, walletId) { should.not.exist(err); var copayerOpts = helpers.getSignedCopayerOpts({ walletId: walletId, name: 'me', xPubKey: TestData.copayers[0].xPubKey_45H, requestPubKey: TestData.copayers[0].pubKey_1H_0, }); server.joinWallet(copayerOpts, function(err, result) { should.not.exist(err); helpers.getAuthServer(result.copayerId, function(server, wallet) { var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 80, TestData.copayers[0].privKey); server.createTx(txOpts, function(err, tx) { should.not.exist(tx); should.exist(err); err.code.should.equal('WALLET_NOT_COMPLETE'); done(); }); }); }); }); }); }); describe('#createTx', function() { var server, wallet; beforeEach(function(done) { helpers.createAndJoinWallet(2, 3, function(s, w) { server = s; wallet = w; done(); }); }); it('should create a tx', function(done) { helpers.stubUtxos(server, wallet, [100, 200], function() { var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 80, TestData.copayers[0].privKey_1H_0, { message: 'some message', customData: 'some custom data' }); server.createTx(txOpts, function(err, tx) { should.not.exist(err); should.exist(tx); tx.walletId.should.equal(wallet.id); tx.network.should.equal('livenet'); tx.creatorId.should.equal(wallet.copayers[0].id); tx.message.should.equal('some 
message'); tx.customData.should.equal('some custom data'); tx.isAccepted().should.equal(false); tx.isRejected().should.equal(false); tx.amount.should.equal(helpers.toSatoshi(80)); var estimatedFee = WalletUtils.DEFAULT_FEE_PER_KB * 400 / 1000; /* fully signed tx should have about 400 bytes */ tx.fee.should.be.within(0.9 * estimatedFee, 1.1 * estimatedFee); server.getPendingTxs({}, function(err, txs) { should.not.exist(err); txs.length.should.equal(1); /* creator */ txs[0].deleteLockTime.should.equal(0); server.getBalance({}, function(err, balance) { should.not.exist(err); balance.totalAmount.should.equal(helpers.toSatoshi(300)); balance.lockedAmount.should.equal(tx.inputs[0].satoshis); balance.lockedAmount.should.be.below(balance.totalAmount); balance.availableAmount.should.equal(balance.totalAmount - balance.lockedAmount); server.storage.fetchAddresses(wallet.id, function(err, addresses) { should.not.exist(err); var change = _.filter(addresses, { isChange: true }); change.length.should.equal(1); done(); }); }); }); }); }); it('should create a tx with legacy signature', function(done) { helpers.stubUtxos(server, wallet, [100, 200], function() { var txOpts = helpers.createProposalOptsLegacy('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 80, 'some message', TestData.copayers[0].privKey_1H_0); server.createTx(txOpts, function(err, tx) { should.not.exist(err); should.exist(tx); done(); }); }); }); it('should create a tx using confirmed utxos first', function(done) { helpers.stubUtxos(server, wallet, [1.3, 'u0.5', 'u0.1', 1.2], function(utxos) { var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 1.5, TestData.copayers[0].privKey_1H_0, { message: 'some message' }); server.createTx(txOpts, function(err, tx) { should.not.exist(err); should.exist(tx); tx.inputs.length.should.equal(2); _.difference(_.pluck(tx.inputs, 'txid'), [utxos[0].txid, utxos[3].txid]).length.should.equal(0); done(); }); }); }); it('should use unconfirmed utxos only when no more confirmed utxos are available', function(done) { helpers.stubUtxos(server, wallet, [1.3, 'u0.5', 'u0.1', 1.2], function(utxos) { var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 2.55, TestData.copayers[0].privKey_1H_0, { message: 'some message' }); server.createTx(txOpts, function(err, tx) { should.not.exist(err); should.exist(tx); tx.inputs.length.should.equal(3); var txids = _.pluck(tx.inputs, 'txid'); txids.should.contain(utxos[0].txid); txids.should.contain(utxos[3].txid); done(); }); }); }); it('should exclude unconfirmed utxos if specified', function(done) { helpers.stubUtxos(server, wallet, [1.3, 'u2', 'u0.1', 1.2], function(utxos) { var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 3, TestData.copayers[0].privKey_1H_0, { message: 'some message' }); txOpts.excludeUnconfirmedUtxos = true; server.createTx(txOpts, function(err, tx) { should.exist(err); err.code.should.equal('INSUFFICIENT_FUNDS'); err.message.should.equal('Insufficient funds'); var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 2.5, TestData.copayers[0].privKey_1H_0, { message: 'some message' }); txOpts.excludeUnconfirmedUtxos = true; server.createTx(txOpts, function(err, tx) { should.exist(err); err.code.should.equal('INSUFFICIENT_FUNDS_FOR_FEE'); err.message.should.equal('Insufficient funds for fee'); done(); }); }); }); }); it('should use non-locked confirmed utxos when specified', function(done) { helpers.stubUtxos(server, wallet, [1.3, 'u2', 'u0.1', 1.2],
function(utxos) { var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 1.4, TestData.copayers[0].privKey_1H_0, { message: 'some message' }); txOpts.excludeUnconfirmedUtxos = true; server.createTx(txOpts, function(err, tx) { should.not.exist(err); should.exist(tx); tx.inputs.length.should.equal(2); server.getBalance({}, function(err, balance) { should.not.exist(err); balance.lockedConfirmedAmount.should.equal(helpers.toSatoshi(2.5)); balance.availableConfirmedAmount.should.equal(0); var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 0.01, TestData.copayers[0].privKey_1H_0, { message: 'some message' }); txOpts.excludeUnconfirmedUtxos = true; server.createTx(txOpts, function(err, tx) { should.exist(err); err.code.should.equal('LOCKED_FUNDS'); done(); }); }); }); }); }); it('should fail gracefully if unable to reach the blockchain', function(done) { blockchainExplorer.getUnspentUtxos = sinon.stub().callsArgWith(1, 'dummy error'); server.createAddress({}, function(err, address) { should.not.exist(err); var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 80, TestData.copayers[0].privKey_1H_0, { message: 'some message' }); server.createTx(txOpts, function(err, tx) { should.exist(err); err.toString().should.equal('dummy error'); done(); }); }); }); it('should fail to create tx with invalid proposal signature', function(done) { helpers.stubUtxos(server, wallet, [100, 200], function() { var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 80, 'dummy'); server.createTx(txOpts, function(err, tx) { should.not.exist(tx); should.exist(err); err.message.should.equal('Invalid proposal signature'); done(); }); }); }); it('should fail to create tx with proposal signed by another copayer', function(done) { helpers.stubUtxos(server, wallet, [100, 200], function() { var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 80, TestData.copayers[1].privKey_1H_0); server.createTx(txOpts, function(err, tx) { should.not.exist(tx); should.exist(err); err.message.should.equal('Invalid proposal signature'); done(); }); }); }); it('should fail to create tx for invalid address', function(done) { helpers.stubUtxos(server, wallet, [100, 200], function() { var txOpts = helpers.createSimpleProposalOpts('invalid address', 80, TestData.copayers[0].privKey_1H_0); server.createTx(txOpts, function(err, tx) { should.exist(err); should.not.exist(tx); // may fail due to Non-base58 character, or Checksum mismatch, or other done(); }); }); }); it('should fail to create tx for address of different network', function(done) { helpers.stubUtxos(server, wallet, [100, 200], function() { var txOpts = helpers.createSimpleProposalOpts('myE38JHdxmQcTJGP1ZiX4BiGhDxMJDvLJD', 80, TestData.copayers[0].privKey_1H_0); server.createTx(txOpts, function(err, tx) { should.not.exist(tx); should.exist(err); err.code.should.equal('INCORRECT_ADDRESS_NETWORK'); err.message.should.equal('Incorrect address network'); done(); }); }); }); it('should fail to create tx for invalid amount', function(done) { var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 0, TestData.copayers[0].privKey_1H_0); server.createTx(txOpts, function(err, tx) { should.not.exist(tx); should.exist(err); err.message.should.equal('Invalid amount'); done(); }); }); it('should fail to create tx when insufficient funds', function(done) { helpers.stubUtxos(server, wallet, [100], function() { var txOpts = 
helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 120, TestData.copayers[0].privKey_1H_0); server.createTx(txOpts, function(err, tx) { should.exist(err); err.code.should.equal('INSUFFICIENT_FUNDS'); err.message.should.equal('Insufficient funds'); server.getPendingTxs({}, function(err, txs) { should.not.exist(err); txs.length.should.equal(0); server.getBalance({}, function(err, balance) { should.not.exist(err); balance.lockedAmount.should.equal(0); balance.totalAmount.should.equal(10000000000); done(); }); }); }); }); }); it('should fail to create tx when insufficient funds for fee', function(done) { helpers.stubUtxos(server, wallet, 0.048222, function() { var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 0.048200, TestData.copayers[0].privKey_1H_0); server.createTx(txOpts, function(err, tx) { should.exist(err); err.code.should.equal('INSUFFICIENT_FUNDS_FOR_FEE'); err.message.should.equal('Insufficient funds for fee'); done(); }); }); }); it('should scale fees according to tx size', function(done) { helpers.stubUtxos(server, wallet, [1, 1, 1, 1], function() { var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 3.5, TestData.copayers[0].privKey_1H_0); server.createTx(txOpts, function(err, tx) { should.not.exist(err); var estimatedFee = WalletUtils.DEFAULT_FEE_PER_KB * 1300 / 1000; // fully signed tx should have about 1300 bytes tx.fee.should.be.within(0.9 * estimatedFee, 1.1 * estimatedFee); done(); }); }); }); it('should be possible to use a smaller fee', function(done) { helpers.stubUtxos(server, wallet, 1, function() { var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 0.99995, TestData.copayers[0].privKey_1H_0, { feePerKb: 80000 }); server.createTx(txOpts, function(err, tx) { should.exist(err); err.code.should.equal('INSUFFICIENT_FUNDS_FOR_FEE'); var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 0.99995, TestData.copayers[0].privKey_1H_0, { feePerKb: 5000 }); server.createTx(txOpts, function(err, tx) { should.not.exist(err); var estimatedFee = 5000 * 400 / 1000; // fully signed tx should have about 400 bytes tx.fee.should.be.within(0.9 * estimatedFee, 1.1 * estimatedFee); // Sign it to make sure Bitcore doesn't complain about the fees var signatures = helpers.clientSign(tx, TestData.copayers[0].xPrivKey); server.signTx({ txProposalId: tx.id, signatures: signatures, }, function(err) { should.not.exist(err); done(); }); }); }); }); }); it('should fail to create tx for dust amount', function(done) { helpers.stubUtxos(server, wallet, [1], function() { var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 0.00000001, TestData.copayers[0].privKey_1H_0); server.createTx(txOpts, function(err, tx) { should.exist(err); err.code.should.equal('DUST_AMOUNT'); err.message.should.equal('Amount below dust threshold'); done(); }); }); }); it('should fail to create tx that would return change for dust amount', function(done) { helpers.stubUtxos(server, wallet, [1], function() { var fee = 4095 / 1e8; // The exact fee of the resulting tx var change = 100 / 1e8; // Below dust var amount = 1 - fee - change; var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', amount, TestData.copayers[0].privKey_1H_0, { feePerKb: 10000 }); server.createTx(txOpts, function(err, tx) { should.exist(err); err.code.should.equal('DUST_AMOUNT'); err.message.should.equal('Amount below dust threshold'); done(); }); }); }); 
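// The next case verifies that coins reserved by a pending proposal are reported as
// LOCKED_FUNDS rather than INSUFFICIENT_FUNDS: the funds exist, they are just locked.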
it('should fail with different error for insufficient funds and locked funds', function(done) { helpers.stubUtxos(server, wallet, [10, 10], function() { var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 11, TestData.copayers[0].privKey_1H_0); server.createTx(txOpts, function(err, tx) { should.not.exist(err); server.getBalance({}, function(err, balance) { should.not.exist(err); balance.totalAmount.should.equal(helpers.toSatoshi(20)); balance.lockedAmount.should.equal(helpers.toSatoshi(20)); txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 8, TestData.copayers[0].privKey_1H_0); server.createTx(txOpts, function(err, tx) { should.exist(err); err.code.should.equal('LOCKED_FUNDS'); err.message.should.equal('Funds are locked by pending transaction proposals'); done(); }); }); }); }); }); it('should create tx with 0 change output', function(done) { helpers.stubUtxos(server, wallet, [1], function() { var fee = 4100 / 1e8; // The exact fee of the resulting tx var amount = 1 - fee; var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', amount, TestData.copayers[0].privKey_1H_0); server.createTx(txOpts, function(err, tx) { should.not.exist(err); should.exist(tx); var bitcoreTx = tx.getBitcoreTx(); bitcoreTx.outputs.length.should.equal(1); bitcoreTx.outputs[0].satoshis.should.equal(tx.amount); done(); }); }); }); it('should fail gracefully when bitcore throws exception on raw tx creation', function(done) { helpers.stubUtxos(server, wallet, [10], function() { var bitcoreStub = sinon.stub(Bitcore, 'Transaction'); bitcoreStub.throws({ name: 'dummy', message: 'dummy exception' }); var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 2, TestData.copayers[0].privKey_1H_0); server.createTx(txOpts, function(err, tx) { should.exist(err); err.message.should.equal('dummy exception'); bitcoreStub.restore(); done(); }); }); }); it('should create tx when there is a pending tx and enough UTXOs', function(done) { helpers.stubUtxos(server, wallet, [10.1, 10.2, 10.3], function() { var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 12, TestData.copayers[0].privKey_1H_0); server.createTx(txOpts, function(err, tx) { should.not.exist(err); should.exist(tx); var txOpts2 = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 8, TestData.copayers[0].privKey_1H_0); server.createTx(txOpts2, function(err, tx) { should.not.exist(err); should.exist(tx); server.getPendingTxs({}, function(err, txs) { should.not.exist(err); txs.length.should.equal(2); server.getBalance({}, function(err, balance) { should.not.exist(err); balance.totalAmount.should.equal(3060000000); balance.lockedAmount.should.equal(3060000000); done(); }); }); }); }); }); }); it('should fail to create tx when there is a pending tx and not enough UTXOs', function(done) { helpers.stubUtxos(server, wallet, [10.1, 10.2, 10.3], function() { var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 12, TestData.copayers[0].privKey_1H_0); server.createTx(txOpts, function(err, tx) { should.not.exist(err); should.exist(tx); var txOpts2 = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 24, TestData.copayers[0].privKey_1H_0); server.createTx(txOpts2, function(err, tx) { err.code.should.equal('LOCKED_FUNDS'); should.not.exist(tx); server.getPendingTxs({}, function(err, txs) { should.not.exist(err); txs.length.should.equal(1); server.getBalance({}, 
function(err, balance) { should.not.exist(err); balance.totalAmount.should.equal(helpers.toSatoshi(30.6)); var amountInputs = _.sum(txs[0].inputs, 'satoshis'); balance.lockedAmount.should.equal(amountInputs); balance.lockedAmount.should.be.below(balance.totalAmount); balance.availableAmount.should.equal(balance.totalAmount - balance.lockedAmount); done(); }); }); }); }); }); }); it('should create tx using different UTXOs for simultaneous requests', function(done) { var N = 5; helpers.stubUtxos(server, wallet, _.range(100, 100 + N, 0), function(utxos) { server.getBalance({}, function(err, balance) { should.not.exist(err); balance.totalAmount.should.equal(helpers.toSatoshi(N * 100)); balance.lockedAmount.should.equal(0); var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 80, TestData.copayers[0].privKey_1H_0); async.map(_.range(N), function(i, cb) { server.createTx(txOpts, function(err, tx) { cb(err, tx); }); }, function(err) { server.getPendingTxs({}, function(err, txs) { should.not.exist(err); txs.length.should.equal(N); _.uniq(_.pluck(txs, 'changeAddress')).length.should.equal(N); server.getBalance({}, function(err, balance) { should.not.exist(err); balance.totalAmount.should.equal(helpers.toSatoshi(N * 100)); balance.lockedAmount.should.equal(balance.totalAmount); done(); }); }); }); }); }); }); it('should create tx for type multiple_outputs', function(done) { helpers.stubUtxos(server, wallet, [100, 200], function() { var outputs = [{ toAddress: '18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', amount: 75, message: 'message #1' }, { toAddress: '18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', amount: 75, message: 'message #2' }]; var txOpts = helpers.createProposalOpts(Model.TxProposal.Types.MULTIPLEOUTPUTS, outputs, TestData.copayers[0].privKey_1H_0, { message: 'some message' }); server.createTx(txOpts, function(err, tx) { should.not.exist(err); should.exist(tx); done(); }); }); }); it('should fail to create tx for type multiple_outputs with missing output argument', function(done) { helpers.stubUtxos(server, wallet, [100, 200], function() { var outputs = [{ amount: 80, message: 'message #1', }, { toAddress: '18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', amount: 90, message: 'message #2' }]; var txOpts = helpers.createProposalOpts(Model.TxProposal.Types.MULTIPLEOUTPUTS, outputs, TestData.copayers[0].privKey_1H_0, { message: 'some message' }); server.createTx(txOpts, function(err, tx) { should.exist(err); err.message.should.contain('outputs argument missing'); done(); }); }); }); it('should fail to create tx for unsupported proposal type', function(done) { helpers.stubUtxos(server, wallet, [100, 200], function() { var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 80, TestData.copayers[0].privKey_1H_0, { message: 'some message' }); txOpts.type = 'bogus'; server.createTx(txOpts, function(err, tx) { should.exist(err); err.message.should.contain('Invalid proposal type'); done(); }); }); }); it('should be able to send max amount', function(done) { helpers.stubUtxos(server, wallet, _.range(1, 10, 0), function() { server.getBalance({}, function(err, balance) { should.not.exist(err); balance.totalAmount.should.equal(helpers.toSatoshi(9)); balance.lockedAmount.should.equal(0); balance.availableAmount.should.equal(helpers.toSatoshi(9)); balance.totalBytesToSendMax.should.equal(2896); var fee = parseInt((balance.totalBytesToSendMax * 10000 / 1000).toFixed(0)); var max = balance.availableAmount - fee; var txOpts = 
helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', max / 1e8, TestData.copayers[0].privKey_1H_0); server.createTx(txOpts, function(err, tx) { should.not.exist(err); should.exist(tx); tx.amount.should.equal(max); var estimatedFee = 2896 * 10000 / 1000; tx.fee.should.be.within(0.9 * estimatedFee, 1.1 * estimatedFee); server.getBalance({}, function(err, balance) { should.not.exist(err); balance.lockedAmount.should.equal(helpers.toSatoshi(9)); balance.availableAmount.should.equal(0); done(); }); }); }); }); }); it('should be able to send max non-locked amount', function(done) { helpers.stubUtxos(server, wallet, _.range(1, 10, 0), function() { var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 3.5, TestData.copayers[0].privKey_1H_0); server.createTx(txOpts, function(err, tx) { should.not.exist(err); server.getBalance({}, function(err, balance) { should.not.exist(err); balance.totalAmount.should.equal(helpers.toSatoshi(9)); balance.lockedAmount.should.equal(helpers.toSatoshi(4)); balance.availableAmount.should.equal(helpers.toSatoshi(5)); balance.totalBytesToSendMax.should.equal(1653); var fee = parseInt((balance.totalBytesToSendMax * 2000 / 1000).toFixed(0)); var max = balance.availableAmount - fee; var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', max / 1e8, TestData.copayers[0].privKey_1H_0, { feePerKb: 2000 }); server.createTx(txOpts, function(err, tx) { should.not.exist(err); should.exist(tx); tx.amount.should.equal(max); var estimatedFee = 1653 * 2000 / 1000; tx.fee.should.be.within(0.9 * estimatedFee, 1.1 * estimatedFee); server.getBalance({}, function(err, balance) { should.not.exist(err); balance.lockedAmount.should.equal(helpers.toSatoshi(9)); done(); }); }); }); }); }); }); it('should not use UTXO provided in utxosToExclude option', function(done) { helpers.stubUtxos(server, wallet, [1, 2, 3], function(utxos) { var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 4.5, TestData.copayers[0].privKey_1H_0); txOpts.utxosToExclude = [utxos[1].txid + ':' + utxos[1].vout]; server.createTx(txOpts, function(err, tx) { should.exist(err); err.code.should.equal('INSUFFICIENT_FUNDS'); err.message.should.equal('Insufficient funds'); done(); }); }); }); it('should use non-excluded UTXOs', function(done) { helpers.stubUtxos(server, wallet, [1, 2], function(utxos) { var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 0.5, TestData.copayers[0].privKey_1H_0); txOpts.utxosToExclude = [utxos[0].txid + ':' + utxos[0].vout]; server.createTx(txOpts, function(err, tx) { should.not.exist(err); tx.inputs.length.should.equal(1); tx.inputs[0].txid.should.equal(utxos[1].txid); tx.inputs[0].vout.should.equal(utxos[1].vout); done(); }); }); }); }); describe('#createTx backoff time', function(done) { var server, wallet, txid; beforeEach(function(done) { helpers.createAndJoinWallet(2, 2, function(s, w) { server = s; wallet = w; helpers.stubUtxos(server, wallet, _.range(2, 6), function() { done(); }); }); }); it('should follow backoff time after consecutive rejections', function(done) { async.series([ function(next) { async.each(_.range(3), function(i, next) { var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 1, TestData.copayers[0].privKey_1H_0); server.createTx(txOpts, function(err, tx) { should.not.exist(err); server.rejectTx({ txProposalId: tx.id, reason: 'some reason', }, next); }); }, next); }, function(next) { // Allow a 4th tx var 
txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 1, TestData.copayers[0].privKey_1H_0); server.createTx(txOpts, function(err, tx) { server.rejectTx({ txProposalId: tx.id, reason: 'some reason', }, next); }); }, function(next) { // Do not allow before backoff time var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 1, TestData.copayers[0].privKey_1H_0); server.createTx(txOpts, function(err, tx) { should.exist(err); err.code.should.equal('TX_CANNOT_CREATE'); next(); }); }, function(next) { var clock = sinon.useFakeTimers(Date.now() + (WalletService.BACKOFF_TIME + 2) * 60 * 1000, 'Date'); var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 1, TestData.copayers[0].privKey_1H_0); server.createTx(txOpts, function(err, tx) { clock.restore(); server.rejectTx({ txProposalId: tx.id, reason: 'some reason', }, next); }); }, function(next) { // Do not allow a 5th tx before backoff time var clock = sinon.useFakeTimers(Date.now() + (WalletService.BACKOFF_TIME + 2) * 60 * 1000 + 1, 'Date'); var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 1, TestData.copayers[0].privKey_1H_0); server.createTx(txOpts, function(err, tx) { clock.restore(); should.exist(err); err.code.should.equal('TX_CANNOT_CREATE'); next(); }); }, ], function(err) { should.not.exist(err); done(); }); }); }); describe('#rejectTx', function() { var server, wallet, txid; beforeEach(function(done) { helpers.createAndJoinWallet(2, 2, function(s, w) { server = s; wallet = w; helpers.stubUtxos(server, wallet, _.range(1, 9), function() { var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 10, TestData.copayers[0].privKey_1H_0); server.createTx(txOpts, function(err, tx) { should.not.exist(err); should.exist(tx); txid = tx.id; done(); }); }); }); }); it('should reject a TX', function(done) { server.getPendingTxs({}, function(err, txs) { var tx = txs[0]; tx.id.should.equal(txid); server.rejectTx({ txProposalId: txid, reason: 'some reason', }, function(err) { should.not.exist(err); server.getPendingTxs({}, function(err, txs) { should.not.exist(err); txs.should.be.empty; server.getTx({ txProposalId: txid }, function(err, tx) { var actors = tx.getActors(); actors.length.should.equal(1); actors[0].should.equal(wallet.copayers[0].id); var action = tx.getActionBy(wallet.copayers[0].id); action.type.should.equal('reject'); action.comment.should.equal('some reason'); done(); }); }); }); }); }); it('should fail to reject non-pending TX', function(done) { async.waterfall([ function(next) { server.getPendingTxs({}, function(err, txs) { var tx = txs[0]; tx.id.should.equal(txid); next(); }); }, function(next) { server.rejectTx({ txProposalId: txid, reason: 'some reason', }, function(err) { should.not.exist(err); next(); }); }, function(next) { server.getPendingTxs({}, function(err, txs) { should.not.exist(err); txs.should.be.empty; next(); }); }, function(next) { helpers.getAuthServer(wallet.copayers[1].id, function(server) { server.rejectTx({ txProposalId: txid, reason: 'some other reason', }, function(err) { should.exist(err); err.code.should.equal('TX_NOT_PENDING'); done(); }); }); }, ]); }); }); describe('#signTx', function() { describe('1-of-1 (BIP44 & P2PKH)', function() { var server, wallet, txid; beforeEach(function(done) { helpers.createAndJoinWallet(1, 1, function(s, w) { server = s; wallet = w; helpers.stubUtxos(server, wallet, [1, 2], function() { var txOpts = 
helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 2.5, TestData.copayers[0].privKey_1H_0); server.createTx(txOpts, function(err, tx) { should.not.exist(err); should.exist(tx); tx.derivationStrategy.should.equal('BIP44'); tx.addressType.should.equal('P2PKH'); txid = tx.id; done(); }); }); }); }); it('should sign a TX with multiple inputs, different paths, and return raw', function(done) { blockchainExplorer.getTransaction = sinon.stub().callsArgWith(1, null, null); server.getPendingTxs({}, function(err, txs) { var tx = txs[0]; tx.id.should.equal(txid); var signatures = helpers.clientSign(tx, TestData.copayers[0].xPrivKey); should.not.exist(tx.raw); server.signTx({ txProposalId: txid, signatures: signatures, }, function(err, txp) { should.not.exist(err); txp.status.should.equal('accepted'); // The raw Tx should contain the Signatures. txp.raw.should.contain(signatures[0]); // Get pending should also contains the raw TX server.getPendingTxs({}, function(err, txs) { var tx = txs[0]; should.not.exist(err); tx.status.should.equal('accepted'); tx.raw.should.contain(signatures[0]); done(); }); }); }); }); }); describe('Multisig', function() { var server, wallet, txid; beforeEach(function(done) { helpers.createAndJoinWallet(2, 3, function(s, w) { server = s; wallet = w; helpers.stubUtxos(server, wallet, _.range(1, 9), function() { var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 20, TestData.copayers[0].privKey_1H_0); server.createTx(txOpts, function(err, tx) { should.not.exist(err); should.exist(tx); txid = tx.id; done(); }); }); }); }); it('should sign a TX with multiple inputs, different paths', function(done) { server.getPendingTxs({}, function(err, txs) { var tx = txs[0]; tx.id.should.equal(txid); var signatures = helpers.clientSign(tx, TestData.copayers[0].xPrivKey); server.signTx({ txProposalId: txid, signatures: signatures, }, function(err, txp) { should.not.exist(err); should.not.exist(tx.raw); server.getPendingTxs({}, function(err, txs) { should.not.exist(err); var tx = txs[0]; tx.id.should.equal(txid); var actors = tx.getActors(); actors.length.should.equal(1); actors[0].should.equal(wallet.copayers[0].id); tx.getActionBy(wallet.copayers[0].id).type.should.equal('accept'); done(); }); }); }); }); it('should fail to sign with a xpriv from other copayer', function(done) { server.getPendingTxs({}, function(err, txs) { var tx = txs[0]; tx.id.should.equal(txid); var signatures = helpers.clientSign(tx, TestData.copayers[1].xPrivKey); server.signTx({ txProposalId: txid, signatures: signatures, }, function(err) { err.code.should.equal('BAD_SIGNATURES'); done(); }); }); }); it('should fail if one signature is broken', function(done) { server.getPendingTxs({}, function(err, txs) { var tx = txs[0]; tx.id.should.equal(txid); var signatures = helpers.clientSign(tx, TestData.copayers[0].xPrivKey); signatures[0] = 1; server.signTx({ txProposalId: txid, signatures: signatures, }, function(err) { err.message.should.contain('signatures'); done(); }); }); }); it('should fail on invalid signature', function(done) { server.getPendingTxs({}, function(err, txs) { var tx = txs[0]; tx.id.should.equal(txid); var signatures = ['11', '22', '33', '44', '55']; server.signTx({ txProposalId: txid, signatures: signatures, }, function(err) { should.exist(err); err.message.should.contain('Bad signatures'); done(); }); }); }); it('should fail on wrong number of invalid signatures', function(done) { server.getPendingTxs({}, function(err, txs) { var tx = txs[0]; 
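// Deliberately supply one signature fewer than the number of inputs to trigger the error.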
tx.id.should.equal(txid); var signatures = _.take(helpers.clientSign(tx, TestData.copayers[0].xPrivKey), tx.inputs.length - 1); server.signTx({ txProposalId: txid, signatures: signatures, }, function(err) { should.exist(err); err.message.should.contain('Bad signatures'); done(); }); }); }); it('should fail when signing a TX previously rejected', function(done) { server.getPendingTxs({}, function(err, txs) { var tx = txs[0]; tx.id.should.equal(txid); var signatures = helpers.clientSign(tx, TestData.copayers[0].xPrivKey); server.signTx({ txProposalId: txid, signatures: signatures, }, function(err) { server.rejectTx({ txProposalId: txid, }, function(err) { err.code.should.contain('COPAYER_VOTED'); done(); }); }); }); }); it('should fail when rejected a previously signed TX', function(done) { server.getPendingTxs({}, function(err, txs) { var tx = txs[0]; tx.id.should.equal(txid); server.rejectTx({ txProposalId: txid, }, function(err) { var signatures = helpers.clientSign(tx, TestData.copayers[0].xPrivKey); server.signTx({ txProposalId: txid, signatures: signatures, }, function(err) { err.code.should.contain('COPAYER_VOTED'); done(); }); }); }); }); it('should fail to sign a non-pending TX', function(done) { async.waterfall([ function(next) { server.rejectTx({ txProposalId: txid, reason: 'some reason', }, function(err) { should.not.exist(err); next(); }); }, function(next) { helpers.getAuthServer(wallet.copayers[1].id, function(server) { server.rejectTx({ txProposalId: txid, reason: 'some reason', }, function(err) { should.not.exist(err); next(); }); }); }, function(next) { server.getPendingTxs({}, function(err, txs) { should.not.exist(err); txs.should.be.empty; next(); }); }, function(next) { helpers.getAuthServer(wallet.copayers[2].id, function(server) { server.getTx({ txProposalId: txid }, function(err, tx) { should.not.exist(err); var signatures = helpers.clientSign(tx, TestData.copayers[2].xPrivKey); server.signTx({ txProposalId: txid, signatures: signatures, }, function(err) { should.exist(err); err.code.should.equal('TX_NOT_PENDING'); done(); }); }); }); }, ]); }); }); }); describe('#broadcastTx & #broadcastRawTx', function() { var server, wallet, txpid, txid; beforeEach(function(done) { helpers.createAndJoinWallet(1, 1, function(s, w) { server = s; wallet = w; helpers.stubUtxos(server, wallet, [10, 10], function() { var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 9, TestData.copayers[0].privKey_1H_0, { message: 'some message' }); server.createTx(txOpts, function(err, txp) { should.not.exist(err); should.exist(txp); var signatures = helpers.clientSign(txp, TestData.copayers[0].xPrivKey); server.signTx({ txProposalId: txp.id, signatures: signatures, }, function(err, txp) { should.not.exist(err); should.exist(txp); txp.isAccepted().should.be.true; txp.isBroadcasted().should.be.false; txid = txp.txid; txpid = txp.id; done(); }); }); }); }); }); it('should broadcast a tx', function(done) { var clock = sinon.useFakeTimers(1234000, 'Date'); helpers.stubBroadcast(); server.broadcastTx({ txProposalId: txpid }, function(err) { should.not.exist(err); server.getTx({ txProposalId: txpid }, function(err, txp) { should.not.exist(err); should.not.exist(txp.raw); txp.txid.should.equal(txid); txp.isBroadcasted().should.be.true; txp.broadcastedOn.should.equal(1234); clock.restore(); done(); }); }); }); it('should broadcast a raw tx', function(done) { helpers.stubBroadcast(); server.broadcastRawTx({ network: 'testnet', rawTx: 'raw tx', }, function(err, txid) { 
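// broadcastRawTx takes a raw transaction directly (no proposal involved), so it should just
// return a txid; here that value presumably comes from the stubbed blockchain explorer.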
should.not.exist(err); should.exist(txid); done(); }); }); it('should fail to broadcast a tx already marked as broadcasted', function(done) { helpers.stubBroadcast(); server.broadcastTx({ txProposalId: txpid }, function(err) { should.not.exist(err); server.broadcastTx({ txProposalId: txpid }, function(err) { should.exist(err); err.code.should.equal('TX_ALREADY_BROADCASTED'); done(); }); }); }); it('should auto process already broadcasted txs', function(done) { helpers.stubBroadcast(); server.getPendingTxs({}, function(err, txs) { should.not.exist(err); txs.length.should.equal(1); blockchainExplorer.getTransaction = sinon.stub().callsArgWith(1, null, { txid: 999 }); server.getPendingTxs({}, function(err, txs) { should.not.exist(err); txs.length.should.equal(0); done(); }); }); }); it('should process only broadcasted txs', function(done) { helpers.stubBroadcast(); var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 9, TestData.copayers[0].privKey_1H_0, { message: 'some message 2' }); server.createTx(txOpts, function(err, txp) { should.not.exist(err); server.getPendingTxs({}, function(err, txs) { should.not.exist(err); txs.length.should.equal(2); blockchainExplorer.getTransaction = sinon.stub().callsArgWith(1, null, { txid: 999 }); server.getPendingTxs({}, function(err, txs) { should.not.exist(err); txs.length.should.equal(1); txs[0].status.should.equal('pending'); should.not.exist(txs[0].txid); done(); }); }); }); }); it('should fail to broadcast a not yet accepted tx', function(done) { helpers.stubBroadcast(); var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 9, TestData.copayers[0].privKey_1H_0, { message: 'some message' }); server.createTx(txOpts, function(err, txp) { should.not.exist(err); should.exist(txp); server.broadcastTx({ txProposalId: txp.id }, function(err) { should.exist(err); err.code.should.equal('TX_NOT_ACCEPTED'); done(); }); }); }); it('should keep tx as accepted if unable to broadcast it', function(done) { blockchainExplorer.broadcast = sinon.stub().callsArgWith(1, 'broadcast error'); blockchainExplorer.getTransaction = sinon.stub().callsArgWith(1, null, null); server.broadcastTx({ txProposalId: txpid }, function(err) { should.exist(err); err.toString().should.equal('broadcast error'); server.getTx({ txProposalId: txpid }, function(err, txp) { should.not.exist(err); should.exist(txp.txid); txp.isBroadcasted().should.be.false; should.not.exist(txp.broadcastedOn); txp.isAccepted().should.be.true; done(); }); }); }); it('should mark tx as broadcasted if accepted but already in blockchain', function(done) { blockchainExplorer.broadcast = sinon.stub().callsArgWith(1, 'broadcast error'); blockchainExplorer.getTransaction = sinon.stub().callsArgWith(1, null, { txid: '999' }); server.broadcastTx({ txProposalId: txpid }, function(err) { should.not.exist(err); server.getTx({ txProposalId: txpid }, function(err, txp) { should.not.exist(err); should.exist(txp.txid); txp.isBroadcasted().should.be.true; should.exist(txp.broadcastedOn); done(); }); }); }); it('should keep tx as accepted if broadcast fails and cannot check tx in blockchain', function(done) { blockchainExplorer.broadcast = sinon.stub().callsArgWith(1, 'broadcast error'); blockchainExplorer.getTransaction = sinon.stub().callsArgWith(1, 'bc check error'); server.broadcastTx({ txProposalId: txpid }, function(err) { should.exist(err); err.toString().should.equal('bc check error'); server.getTx({ txProposalId: txpid }, function(err, txp) { should.not.exist(err);
should.exist(txp.txid); txp.isBroadcasted().should.be.false; should.not.exist(txp.broadcastedOn); txp.isAccepted().should.be.true; done(); }); }); }); }); describe('Tx proposal workflow', function() { var server, wallet; beforeEach(function(done) { helpers.createAndJoinWallet(2, 3, function(s, w) { server = s; wallet = w; helpers.stubUtxos(server, wallet, _.range(1, 9), function() { helpers.stubBroadcast(); done(); }); }); }); it('other copayers should see pending proposal created by one copayer', function(done) { var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 10, TestData.copayers[0].privKey_1H_0, { message: 'some message' }); server.createTx(txOpts, function(err, txp) { should.not.exist(err); should.exist(txp); helpers.getAuthServer(wallet.copayers[1].id, function(server2, wallet) { server2.getPendingTxs({}, function(err, txps) { should.not.exist(err); txps.length.should.equal(1); txps[0].id.should.equal(txp.id); txps[0].message.should.equal('some message'); done(); }); }); }); }); it('tx proposals should not be finally accepted until quorum is reached', function(done) { var txpId; async.waterfall([ function(next) { var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 10, TestData.copayers[0].privKey_1H_0, { message: 'some message' }); server.createTx(txOpts, function(err, txp) { txpId = txp.id; should.not.exist(err); should.exist(txp); next(); }); }, function(next) { server.getPendingTxs({}, function(err, txps) { should.not.exist(err); txps.length.should.equal(1); var txp = txps[0]; txp.actions.should.be.empty; next(null, txp); }); }, function(txp, next) { var signatures = helpers.clientSign(txp, TestData.copayers[0].xPrivKey); server.signTx({ txProposalId: txpId, signatures: signatures, }, function(err) { should.not.exist(err); next(); }); }, function(next) { server.getPendingTxs({}, function(err, txps) { should.not.exist(err); txps.length.should.equal(1); var txp = txps[0]; txp.isPending().should.be.true; txp.isAccepted().should.be.false; txp.isRejected().should.be.false; txp.isBroadcasted().should.be.false; txp.actions.length.should.equal(1); var action = txp.getActionBy(wallet.copayers[0].id); action.type.should.equal('accept'); server.getNotifications({}, function(err, notifications) { should.not.exist(err); var last = _.last(notifications); last.type.should.not.equal('TxProposalFinallyAccepted'); next(null, txp); }); }); }, function(txp, next) { helpers.getAuthServer(wallet.copayers[1].id, function(server, wallet) { var signatures = helpers.clientSign(txp, TestData.copayers[1].xPrivKey); server.signTx({ txProposalId: txpId, signatures: signatures, }, function(err) { should.not.exist(err); next(); }); }); }, function(next) { server.getPendingTxs({}, function(err, txps) { should.not.exist(err); txps.length.should.equal(1); var txp = txps[0]; txp.isPending().should.be.true; txp.isAccepted().should.be.true; txp.isBroadcasted().should.be.false; should.exist(txp.txid); txp.actions.length.should.equal(2); server.getNotifications({}, function(err, notifications) { should.not.exist(err); var last = _.last(notifications); last.type.should.equal('TxProposalFinallyAccepted'); last.walletId.should.equal(wallet.id); last.creatorId.should.equal(wallet.copayers[1].id); last.data.txProposalId.should.equal(txp.id); done(); }); }); }, ]); }); it('tx proposals should accept as many rejections as possible without finally rejecting', function(done) { var txpId; async.waterfall([ function(next) { var txOpts = 
helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 10, TestData.copayers[0].privKey_1H_0, { message: 'some message' }); server.createTx(txOpts, function(err, txp) { txpId = txp.id; should.not.exist(err); should.exist(txp); next(); }); }, function(next) { server.getPendingTxs({}, function(err, txps) { should.not.exist(err); txps.length.should.equal(1); var txp = txps[0]; txp.actions.should.be.empty; next(); }); }, function(next) { server.rejectTx({ txProposalId: txpId, reason: 'just because' }, function(err) { should.not.exist(err); next(); }); }, function(next) { server.getPendingTxs({}, function(err, txps) { should.not.exist(err); txps.length.should.equal(1); var txp = txps[0]; txp.isPending().should.be.true; txp.isRejected().should.be.false; txp.isAccepted().should.be.false; txp.actions.length.should.equal(1); var action = txp.getActionBy(wallet.copayers[0].id); action.type.should.equal('reject'); action.comment.should.equal('just because'); next(); }); }, function(next) { helpers.getAuthServer(wallet.copayers[1].id, function(server, wallet) { server.rejectTx({ txProposalId: txpId, reason: 'some other reason' }, function(err) { should.not.exist(err); next(); }); }); }, function(next) { server.getPendingTxs({}, function(err, txps) { should.not.exist(err); txps.length.should.equal(0); next(); }); }, function(next) { server.getTx({ txProposalId: txpId }, function(err, txp) { should.not.exist(err); txp.isPending().should.be.false; txp.isRejected().should.be.true; txp.isAccepted().should.be.false; txp.actions.length.should.equal(2); done(); }); }, ]); }); }); describe('#getTx', function() { var server, wallet, txpid; beforeEach(function(done) { helpers.createAndJoinWallet(2, 3, function(s, w) { server = s; wallet = w; helpers.stubUtxos(server, wallet, 10, function() { var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 9, TestData.copayers[0].privKey_1H_0, { message: 'some message' }); server.createTx(txOpts, function(err, txp) { should.not.exist(err); should.exist(txp); txpid = txp.id; done(); }); }); }); }); it('should get own transaction proposal', function(done) { server.getTx({ txProposalId: txpid }, function(err, txp) { should.not.exist(err); should.exist(txp); txp.id.should.equal(txpid); done(); }); }); it('should get someone elses transaction proposal', function(done) { helpers.getAuthServer(wallet.copayers[1].id, function(server2, wallet) { server2.getTx({ txProposalId: txpid }, function(err, res) { should.not.exist(err); res.id.should.equal(txpid); done(); }); }); }); it('should fail to get non-existent transaction proposal', function(done) { server.getTx({ txProposalId: 'dummy' }, function(err, txp) { should.exist(err); should.not.exist(txp); err.code.should.equal('TX_NOT_FOUND') err.message.should.equal('Transaction proposal not found'); done(); }); }); it.skip('should get accepted/rejected transaction proposal', function(done) {}); it.skip('should get broadcasted transaction proposal', function(done) {}); }); describe('#getTxs', function() { var server, wallet, clock; beforeEach(function(done) { this.timeout(5000); clock = sinon.useFakeTimers('Date'); helpers.createAndJoinWallet(1, 1, function(s, w) { server = s; wallet = w; helpers.stubUtxos(server, wallet, _.range(1, 11), function() { var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 0.1, TestData.copayers[0].privKey_1H_0); async.eachSeries(_.range(10), function(i, next) { clock.tick(10 * 1000); server.createTx(txOpts, function(err, tx) { 
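// The fake clock is advanced 10s before each of the 10 proposals, so their createdOn
// timestamps end up at 10, 20, ..., 100, which the range queries below rely on.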
should.not.exist(err); next(); }); }, function(err) { clock.restore(); return done(err); }); }); }); }); afterEach(function() { clock.restore(); }); it('should pull 5 txs, down to time 60', function(done) { server.getTxs({ minTs: 60, limit: 8 }, function(err, txps) { should.not.exist(err); var times = _.pluck(txps, 'createdOn'); times.should.deep.equal([100, 90, 80, 70, 60]); done(); }); }); it('should pull the first 5 txs', function(done) { server.getTxs({ maxTs: 50, limit: 5 }, function(err, txps) { should.not.exist(err); var times = _.pluck(txps, 'createdOn'); times.should.deep.equal([50, 40, 30, 20, 10]); done(); }); }); it('should pull the last 4 txs', function(done) { server.getTxs({ limit: 4 }, function(err, txps) { should.not.exist(err); var times = _.pluck(txps, 'createdOn'); times.should.deep.equal([100, 90, 80, 70]); done(); }); }); it('should pull all txs', function(done) { server.getTxs({}, function(err, txps) { should.not.exist(err); var times = _.pluck(txps, 'createdOn'); times.should.deep.equal([100, 90, 80, 70, 60, 50, 40, 30, 20, 10]); done(); }); }); it('should pull txs from times 50 to 70', function(done) { server.getTxs({ minTs: 50, maxTs: 70, }, function(err, txps) { should.not.exist(err); var times = _.pluck(txps, 'createdOn'); times.should.deep.equal([70, 60, 50]); done(); }); }); }); describe('#getNotifications', function() { var clock; var server, wallet; beforeEach(function(done) { clock = sinon.useFakeTimers(10 * 1000, 'Date'); helpers.createAndJoinWallet(1, 1, function(s, w) { server = s; wallet = w; helpers.stubUtxos(server, wallet, _.range(4), function() { var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 0.01, TestData.copayers[0].privKey_1H_0); async.eachSeries(_.range(3), function(i, next) { clock.tick(25 * 1000); server.createTx(txOpts, function(err, tx) { should.not.exist(err); next(); }); }, function(err) { clock.tick(20 * 1000); return done(err); }); }); }); }); afterEach(function() { clock.restore(); }); it('should pull all notifications', function(done) { server.getNotifications({}, function(err, notifications) { should.not.exist(err); var types = _.pluck(notifications, 'type'); types.should.deep.equal(['NewCopayer', 'NewAddress', 'NewAddress', 'NewTxProposal', 'NewTxProposal', 'NewTxProposal']); var walletIds = _.uniq(_.pluck(notifications, 'walletId')); walletIds.length.should.equal(1); walletIds[0].should.equal(wallet.id); var creators = _.uniq(_.compact(_.pluck(notifications, 'creatorId'))); creators.length.should.equal(1); creators[0].should.equal(wallet.copayers[0].id); done(); }); }); it('should pull new block notifications along with wallet notifications in the last 60 seconds', function(done) { /* Simulate new block notification */ server.walletId = 'livenet'; server._notify('NewBlock', { hash: 'dummy hash', }, { isGlobal: true }, function(err) { should.not.exist(err); server.walletId = 'testnet'; server._notify('NewBlock', { hash: 'dummy hash', }, { isGlobal: true }, function(err) { should.not.exist(err); server.walletId = wallet.id; server.getNotifications({ minTs: +Date.now() - (60 * 1000), }, function(err, notifications) { should.not.exist(err); var types = _.pluck(notifications, 'type'); types.should.deep.equal(['NewTxProposal', 'NewTxProposal', 'NewBlock']); var walletIds = _.uniq(_.pluck(notifications, 'walletId')); walletIds.length.should.equal(1); walletIds[0].should.equal(wallet.id); done(); }); }); }); }); it('should pull notifications in the last 60 seconds', function(done) { server.getNotifications({
minTs: +Date.now() - (60 * 1000), }, function(err, notifications) { should.not.exist(err); var types = _.pluck(notifications, 'type'); types.should.deep.equal(['NewTxProposal', 'NewTxProposal']); done(); }); }); it('should pull notifications after a given notification id', function(done) { server.getNotifications({}, function(err, notifications) { should.not.exist(err); var from = _.first(_.takeRight(notifications, 2)).id; // second to last server.getNotifications({ notificationId: from, minTs: +Date.now() - (60 * 1000), }, function(err, res) { should.not.exist(err); res.length.should.equal(1); res[0].id.should.equal(_.first(_.takeRight(notifications)).id); done(); }); }); }); it('should return empty if no notifications found after a given id', function(done) { server.getNotifications({}, function(err, notifications) { should.not.exist(err); var from = _.first(_.takeRight(notifications)).id; // last one server.getNotifications({ notificationId: from, }, function(err, res) { should.not.exist(err); res.length.should.equal(0); done(); }); }); }); it('should return empty if no notifications exist in the given timespan', function(done) { clock.tick(100 * 1000); server.getNotifications({ minTs: +Date.now() - (60 * 1000), }, function(err, res) { should.not.exist(err); res.length.should.equal(0); done(); }); }); it('should contain walletId & creatorId on NewCopayer', function(done) { server.getNotifications({}, function(err, notifications) { should.not.exist(err); var newCopayer = notifications[0]; newCopayer.type.should.equal('NewCopayer'); newCopayer.walletId.should.equal(wallet.id); newCopayer.creatorId.should.equal(wallet.copayers[0].id); done(); }); }); it('should notify sign and acceptance', function(done) { server.getPendingTxs({}, function(err, txs) { blockchainExplorer.broadcast = sinon.stub().callsArgWith(1, 'broadcast error'); var tx = txs[0]; var signatures = helpers.clientSign(tx, TestData.copayers[0].xPrivKey); server.signTx({ txProposalId: tx.id, signatures: signatures, }, function(err) { server.getNotifications({ minTs: Date.now(), }, function(err, notifications) { should.not.exist(err); notifications.length.should.equal(2); var types = _.pluck(notifications, 'type'); types.should.deep.equal(['TxProposalAcceptedBy', 'TxProposalFinallyAccepted']); done(); }); }); }); }); it('should notify rejection', function(done) { server.getPendingTxs({}, function(err, txs) { var tx = txs[1]; server.rejectTx({ txProposalId: tx.id, }, function(err) { should.not.exist(err); server.getNotifications({ minTs: Date.now(), }, function(err, notifications) { should.not.exist(err); notifications.length.should.equal(2); var types = _.pluck(notifications, 'type'); types.should.deep.equal(['TxProposalRejectedBy', 'TxProposalFinallyRejected']); done(); }); }); }); }); it('should notify sign, acceptance, and broadcast, and emit', function(done) { server.getPendingTxs({}, function(err, txs) { var tx = txs[2]; var signatures = helpers.clientSign(tx, TestData.copayers[0].xPrivKey); server.signTx({ txProposalId: tx.id, signatures: signatures, }, function(err) { should.not.exist(err); helpers.stubBroadcast(); server.broadcastTx({ txProposalId: tx.id }, function(err, txp) { should.not.exist(err); server.getNotifications({ minTs: Date.now(), }, function(err, notifications) { should.not.exist(err); notifications.length.should.equal(3); var types = _.pluck(notifications, 'type'); types.should.deep.equal(['TxProposalAcceptedBy', 'TxProposalFinallyAccepted', 'NewOutgoingTx']); done(); }); }); }); }); }); it('should notify 
sign, acceptance, and broadcast, and emit (with 3rd party broadcast)', function(done) { server.getPendingTxs({}, function(err, txs) { var tx = txs[2]; var signatures = helpers.clientSign(tx, TestData.copayers[0].xPrivKey); server.signTx({ txProposalId: tx.id, signatures: signatures, }, function(err) { should.not.exist(err); blockchainExplorer.broadcast = sinon.stub().callsArgWith(1, 'err'); blockchainExplorer.getTransaction = sinon.stub().callsArgWith(1, null, { txid: 11 }); server.broadcastTx({ txProposalId: tx.id }, function(err, txp) { should.not.exist(err); server.getNotifications({ minTs: Date.now(), }, function(err, notifications) { should.not.exist(err); notifications.length.should.equal(3); var types = _.pluck(notifications, 'type'); types.should.deep.equal(['TxProposalAcceptedBy', 'TxProposalFinallyAccepted', 'NewOutgoingTxByThirdParty']); done(); }); }); }); }); }); }); describe('#removeWallet', function() { var server, wallet, clock; beforeEach(function(done) { helpers.createAndJoinWallet(1, 1, function(s, w) { server = s; wallet = w; helpers.stubUtxos(server, wallet, _.range(2), function() { var txOpts = { toAddress: '18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', amount: helpers.toSatoshi(0.1), }; async.eachSeries(_.range(2), function(i, next) { server.createTx(txOpts, function(err, tx) { next(); }); }, done); }); }); }); it('should delete a wallet', function(done) { server.removeWallet({}, function(err) { should.not.exist(err); server.getWallet({}, function(err, w) { should.exist(err); err.code.should.equal('WALLET_NOT_FOUND'); should.not.exist(w); async.parallel([ function(next) { server.storage.fetchAddresses(wallet.id, function(err, items) { items.length.should.equal(0); next(); }); }, function(next) { server.storage.fetchTxs(wallet.id, {}, function(err, items) { items.length.should.equal(0); next(); }); }, function(next) { server.storage.fetchNotifications(wallet.id, null, 0, function(err, items) { items.length.should.equal(0); next(); }); }, ], function(err) { should.not.exist(err); done(); }); }); }); }); // creates 2 wallets, and deletes only 1.
it('should delete a wallet, and only that wallet', function(done) { var server2, wallet2; async.series([ function(next) { helpers.createAndJoinWallet(1, 1, { offset: 1 }, function(s, w) { server2 = s; wallet2 = w; helpers.stubUtxos(server2, wallet2, _.range(1, 3), function() { var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 0.1, TestData.copayers[1].privKey_1H_0, { message: 'some message' }); async.eachSeries(_.range(2), function(i, next) { server2.createTx(txOpts, function(err, tx) { should.not.exist(err); next(err); }); }, next); }); }); }, function(next) { server.removeWallet({}, next); }, function(next) { server.getWallet({}, function(err, wallet) { should.exist(err); err.code.should.equal('WALLET_NOT_FOUND'); next(); }); }, function(next) { server2.getWallet({}, function(err, wallet) { should.not.exist(err); should.exist(wallet); wallet.id.should.equal(wallet2.id); next(); }); }, function(next) { server2.getMainAddresses({}, function(err, addresses) { should.not.exist(err); should.exist(addresses); addresses.length.should.above(0); next(); }); }, function(next) { server2.getTxs({}, function(err, txs) { should.not.exist(err); should.exist(txs); txs.length.should.equal(2); next(); }); }, function(next) { server2.getNotifications({}, function(err, notifications) { should.not.exist(err); should.exist(notifications); notifications.length.should.above(0); next(); }); }, ], function(err) { should.not.exist(err); done(); }); }); }); describe('#removePendingTx', function() { var server, wallet, txp; beforeEach(function(done) { helpers.createAndJoinWallet(2, 3, function(s, w) { server = s; wallet = w; helpers.stubUtxos(server, wallet, [100, 200], function() { var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 80, TestData.copayers[0].privKey_1H_0, { message: 'some message' }); server.createTx(txOpts, function(err, tx) { server.getPendingTxs({}, function(err, txs) { txp = txs[0]; done(); }); }); }); }); }); it('should allow creator to remove an unsigned TX', function(done) { server.removePendingTx({ txProposalId: txp.id }, function(err) { should.not.exist(err); server.getPendingTxs({}, function(err, txs) { txs.length.should.equal(0); done(); }); }); }); it('should allow creator to remove a signed TX by himself', function(done) { var signatures = helpers.clientSign(txp, TestData.copayers[0].xPrivKey); server.signTx({ txProposalId: txp.id, signatures: signatures, }, function(err) { should.not.exist(err); server.removePendingTx({ txProposalId: txp.id }, function(err) { should.not.exist(err); server.getPendingTxs({}, function(err, txs) { txs.length.should.equal(0); done(); }); }); }); }); it('should fail to remove non-pending TX', function(done) { async.waterfall([ function(next) { var signatures = helpers.clientSign(txp, TestData.copayers[0].xPrivKey); server.signTx({ txProposalId: txp.id, signatures: signatures, }, function(err) { should.not.exist(err); next(); }); }, function(next) { helpers.getAuthServer(wallet.copayers[1].id, function(server) { server.rejectTx({ txProposalId: txp.id, }, function(err) { should.not.exist(err); next(); }); }); }, function(next) { helpers.getAuthServer(wallet.copayers[2].id, function(server) { server.rejectTx({ txProposalId: txp.id, }, function(err) { should.not.exist(err); next(); }); }); }, function(next) { server.getPendingTxs({}, function(err, txs) { should.not.exist(err); txs.should.be.empty; next(); }); }, function(next) { server.removePendingTx({ txProposalId: txp.id }, function(err) { 
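// At this point the proposal has been signed by its creator and rejected by the other two
// copayers, so it is no longer pending and removal is expected to fail.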
should.exist(err); err.code.should.equal('TX_NOT_PENDING'); done(); }); }, ]); }); it('should not allow non-creator copayer to remove an unsigned TX ', function(done) { helpers.getAuthServer(wallet.copayers[1].id, function(server2) { server2.removePendingTx({ txProposalId: txp.id }, function(err) { should.exist(err); err.code.should.contain('TX_CANNOT_REMOVE'); server2.getPendingTxs({}, function(err, txs) { txs.length.should.equal(1); done(); }); }); }); }); it('should not allow creator copayer to remove a TX signed by other copayer, in less than 24hrs', function(done) { helpers.getAuthServer(wallet.copayers[1].id, function(server2) { var signatures = helpers.clientSign(txp, TestData.copayers[1].xPrivKey); server2.signTx({ txProposalId: txp.id, signatures: signatures, }, function(err) { should.not.exist(err); server.removePendingTx({ txProposalId: txp.id }, function(err) { err.code.should.equal('TX_CANNOT_REMOVE'); err.message.should.contain('Cannot remove'); done(); }); }); }); }); it('should allow creator copayer to remove a TX rejected by other copayer, in less than 24hrs', function(done) { helpers.getAuthServer(wallet.copayers[1].id, function(server2) { var signatures = helpers.clientSign(txp, TestData.copayers[1].xPrivKey); server2.rejectTx({ txProposalId: txp.id, signatures: signatures, }, function(err) { should.not.exist(err); server.removePendingTx({ txProposalId: txp.id }, function(err) { should.not.exist(err); done(); }); }); }); }); it('should allow creator copayer to remove a TX signed by other copayer, after 24hrs', function(done) { helpers.getAuthServer(wallet.copayers[1].id, function(server2) { var signatures = helpers.clientSign(txp, TestData.copayers[1].xPrivKey); server2.signTx({ txProposalId: txp.id, signatures: signatures, }, function(err) { should.not.exist(err); server.getPendingTxs({}, function(err, txs) { should.not.exist(err); txs[0].deleteLockTime.should.be.above(WalletService.DELETE_LOCKTIME - 10); var clock = sinon.useFakeTimers(Date.now() + 1 + 24 * 3600 * 1000, 'Date'); server.removePendingTx({ txProposalId: txp.id }, function(err) { should.not.exist(err); clock.restore(); done(); }); }); }); }); }); it('should allow other copayer to remove a TX signed, after 24hrs', function(done) { helpers.getAuthServer(wallet.copayers[1].id, function(server2) { var signatures = helpers.clientSign(txp, TestData.copayers[1].xPrivKey); server2.signTx({ txProposalId: txp.id, signatures: signatures, }, function(err) { should.not.exist(err); var clock = sinon.useFakeTimers(Date.now() + 2000 + WalletService.DELETE_LOCKTIME * 1000, 'Date'); server2.removePendingTx({ txProposalId: txp.id }, function(err) { should.not.exist(err); clock.restore(); done(); }); }); }); }); }); describe('#getTxHistory', function() { var server, wallet, mainAddresses, changeAddresses; beforeEach(function(done) { helpers.createAndJoinWallet(1, 1, function(s, w) { server = s; wallet = w; helpers.createAddresses(server, wallet, 1, 1, function(main, change) { mainAddresses = main; changeAddresses = change; done(); }); }); }); it('should get tx history from insight', function(done) { helpers.stubHistory(TestData.history); server.getTxHistory({}, function(err, txs) { should.not.exist(err); should.exist(txs); txs.length.should.equal(2); done(); }); }); it('should get tx history for incoming txs', function(done) { server._normalizeTxHistory = sinon.stub().returnsArg(0); var txs = [{ txid: '1', confirmations: 1, fees: 100, time: 20, inputs: [{ address: 'external', amount: 500, }], outputs: [{ address: 
mainAddresses[0].address, amount: 200, }], }]; helpers.stubHistory(txs); server.getTxHistory({}, function(err, txs) { should.not.exist(err); should.exist(txs); txs.length.should.equal(1); var tx = txs[0]; tx.action.should.equal('received'); tx.amount.should.equal(200); tx.fees.should.equal(100); tx.time.should.equal(20); done(); }); }); it('should get tx history for outgoing txs', function(done) { server._normalizeTxHistory = sinon.stub().returnsArg(0); var txs = [{ txid: '1', confirmations: 1, fees: 100, time: 1, inputs: [{ address: mainAddresses[0].address, amount: 500, }], outputs: [{ address: 'external', amount: 400, }], }]; helpers.stubHistory(txs); server.getTxHistory({}, function(err, txs) { should.not.exist(err); should.exist(txs); txs.length.should.equal(1); var tx = txs[0]; tx.action.should.equal('sent'); tx.amount.should.equal(400); tx.fees.should.equal(100); tx.time.should.equal(1); done(); }); }); it('should get tx history for outgoing txs + change', function(done) { server._normalizeTxHistory = sinon.stub().returnsArg(0); var txs = [{ txid: '1', confirmations: 1, fees: 100, time: 1, inputs: [{ address: mainAddresses[0].address, amount: 500, }], outputs: [{ address: 'external', amount: 300, }, { address: changeAddresses[0].address, amount: 100, }], }]; helpers.stubHistory(txs); server.getTxHistory({}, function(err, txs) { should.not.exist(err); should.exist(txs); txs.length.should.equal(1); var tx = txs[0]; tx.action.should.equal('sent'); tx.amount.should.equal(300); tx.fees.should.equal(100); tx.outputs[0].address.should.equal('external'); tx.outputs[0].amount.should.equal(300); done(); }); }); it('should get tx history with accepted proposal', function(done) { server._normalizeTxHistory = sinon.stub().returnsArg(0); var external = '18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7'; helpers.stubUtxos(server, wallet, [100, 200], function(utxos) { var outputs = [{ toAddress: external, amount: 50, message: undefined // no message }, { toAddress: external, amount: 30, message: 'message #2' }]; var txOpts = helpers.createProposalOpts(Model.TxProposal.Types.MULTIPLEOUTPUTS, outputs, TestData.copayers[0].privKey_1H_0, { message: 'some message' }); server.createTx(txOpts, function(err, tx) { should.not.exist(err); should.exist(tx); var signatures = helpers.clientSign(tx, TestData.copayers[0].xPrivKey); server.signTx({ txProposalId: tx.id, signatures: signatures, }, function(err, tx) { should.not.exist(err); helpers.stubBroadcast(); server.broadcastTx({ txProposalId: tx.id }, function(err, txp) { should.not.exist(err); var txs = [{ txid: txp.txid, confirmations: 1, fees: 5460, time: 1, inputs: [{ address: tx.inputs[0].address, amount: utxos[0].satoshis, }], outputs: [{ address: changeAddresses[0].address, amount: helpers.toSatoshi(20) - 5460, }, { address: external, amount: helpers.toSatoshi(50) }, { address: external, amount: helpers.toSatoshi(30) }] }]; helpers.stubHistory(txs); server.getTxHistory({}, function(err, txs) { should.not.exist(err); should.exist(txs); txs.length.should.equal(1); var tx = txs[0]; tx.action.should.equal('sent'); tx.amount.should.equal(helpers.toSatoshi(80)); tx.message.should.equal('some message'); tx.addressTo.should.equal(external); tx.actions.length.should.equal(1); tx.actions[0].type.should.equal('accept'); tx.actions[0].copayerName.should.equal('copayer 1'); tx.proposalType.should.equal(Model.TxProposal.Types.MULTIPLEOUTPUTS); tx.outputs[0].address.should.equal(external); tx.outputs[0].amount.should.equal(helpers.toSatoshi(50)); 
should.not.exist(tx.outputs[0].message); should.not.exist(tx.outputs[0]['isMine']); should.not.exist(tx.outputs[0]['isChange']); tx.outputs[1].address.should.equal(external); tx.outputs[1].amount.should.equal(helpers.toSatoshi(30)); should.exist(tx.outputs[1].message); tx.outputs[1].message.should.equal('message #2'); done(); }); }); }); }); }); }); it('should get various paginated tx history', function(done) { var testCases = [{ opts: {}, expected: [50, 40, 30, 20, 10], }, { opts: { skip: 1, limit: 3, }, expected: [40, 30, 20], }, { opts: { skip: 1, limit: 2, }, expected: [40, 30], }, { opts: { skip: 2, }, expected: [30, 20, 10], }, { opts: { limit: 4, }, expected: [50, 40, 30, 20], }, { opts: { skip: 0, limit: 3, }, expected: [50, 40, 30], }, { opts: { skip: 0, limit: 0, }, expected: [], }, { opts: { skip: 4, limit: 20, }, expected: [10], }, { opts: { skip: 20, limit: 1, }, expected: [], }]; server._normalizeTxHistory = sinon.stub().returnsArg(0); var timestamps = [50, 40, 30, 20, 10]; var txs = _.map(timestamps, function(ts, idx) { return { txid: (idx + 1).toString(), confirmations: ts / 10, fees: 100, time: ts, inputs: [{ address: 'external', amount: 500, }], outputs: [{ address: mainAddresses[0].address, amount: 200, }], }; }); helpers.stubHistory(txs); async.each(testCases, function(testCase, next) { server.getTxHistory(testCase.opts, function(err, txs) { should.not.exist(err); should.exist(txs); _.pluck(txs, 'time').should.deep.equal(testCase.expected); next(); }); }, done); }); it('should fail gracefully if unable to reach the blockchain', function(done) { blockchainExplorer.getTransactions = sinon.stub().callsArgWith(3, 'dummy error'); server.getTxHistory({}, function(err, txs) { should.exist(err); err.toString().should.equal('dummy error'); done(); }); }); it('should handle invalid tx in history ', function(done) { var h = _.clone(TestData.history); h.push({ txid: 'xx' }) helpers.stubHistory(h); server.getTxHistory({}, function(err, txs) { should.not.exist(err); should.exist(txs); txs.length.should.equal(3); txs[2].action.should.equal('invalid'); done(); }); }); }); describe('#scan', function() { var server, wallet; var scanConfigOld = WalletService.SCAN_CONFIG; describe('1-of-1 wallet (BIP44 & P2PKH)', function() { beforeEach(function(done) { this.timeout(5000); WalletService.SCAN_CONFIG.maxGap = 2; helpers.createAndJoinWallet(1, 1, function(s, w) { server = s; wallet = w; done(); }); }); afterEach(function() { WalletService.SCAN_CONFIG = scanConfigOld; }); it('should scan main addresses', function(done) { helpers.stubAddressActivity( ['1L3z9LPd861FWQhf3vDn89Fnc9dkdBo2CG', // m/0/0 '1GdXraZ1gtoVAvBh49D4hK9xLm6SKgesoE', // m/0/2 '1FUzgKcyPJsYwDLUEVJYeE2N3KVaoxTjGS', // m/1/0 ]); var expectedPaths = [ 'm/0/0', 'm/0/1', 'm/0/2', 'm/1/0', ]; server.scan({}, function(err) { should.not.exist(err); server.getWallet({}, function(err, wallet) { should.not.exist(err); wallet.scanStatus.should.equal('success'); server.storage.fetchAddresses(wallet.id, function(err, addresses) { should.exist(addresses); addresses.length.should.equal(expectedPaths.length); var paths = _.pluck(addresses, 'path'); _.difference(paths, expectedPaths).length.should.equal(0); server.createAddress({}, function(err, address) { should.not.exist(err); address.path.should.equal('m/0/3'); done(); }); }); }); }); }); it('should not go beyond max gap', function(done) { helpers.stubAddressActivity( ['1L3z9LPd861FWQhf3vDn89Fnc9dkdBo2CG', // m/0/0 '1GdXraZ1gtoVAvBh49D4hK9xLm6SKgesoE', // m/0/2 
'1DY9exavapgnCUWDnSTJe1BPzXcpgwAQC4', // m/0/5 '1LD7Cr68LvBPTUeXrr6YXfGrogR7TVj3WQ', // m/1/3 ]); var expectedPaths = [ 'm/0/0', 'm/0/1', 'm/0/2', ]; server.scan({}, function(err) { should.not.exist(err); server.getWallet({}, function(err, wallet) { should.not.exist(err); wallet.scanStatus.should.equal('success'); server.storage.fetchAddresses(wallet.id, function(err, addresses) { should.exist(addresses); addresses.length.should.equal(expectedPaths.length); var paths = _.pluck(addresses, 'path'); _.difference(paths, expectedPaths).length.should.equal(0); server.createAddress({}, function(err, address) { should.not.exist(err); address.path.should.equal('m/0/3'); // A rescan should see the m/0/5 address initially beyond the gap server.scan({}, function(err) { server.createAddress({}, function(err, address) { should.not.exist(err); address.path.should.equal('m/0/6'); done(); }); }); }); }); }); }); }); it('should not affect indexes on new wallet', function(done) { helpers.stubAddressActivity([]); server.scan({}, function(err) { should.not.exist(err); server.getWallet({}, function(err, wallet) { should.not.exist(err); wallet.scanStatus.should.equal('success'); server.storage.fetchAddresses(wallet.id, function(err, addresses) { should.not.exist(err); addresses.length.should.equal(0); server.createAddress({}, function(err, address) { should.not.exist(err); address.path.should.equal('m/0/0'); done(); }); }); }); }); }); it('should restore wallet balance', function(done) { async.waterfall([ function(next) { helpers.stubUtxos(server, wallet, [1, 2, 3], function(utxos) { should.exist(utxos); helpers.stubAddressActivity(_.pluck(utxos, 'address')); server.getBalance({}, function(err, balance) { balance.totalAmount.should.equal(helpers.toSatoshi(6)); next(null, server, wallet); }); }); }, function(server, wallet, next) { server.removeWallet({}, function(err) { next(err); }); }, function(next) { // NOTE: this works because it creates the exact same wallet! 
helpers.createAndJoinWallet(1, 1, function(server, wallet) { server.getBalance({}, function(err, balance) { balance.totalAmount.should.equal(0); next(null, server, wallet); }); }); }, function(server, wallet, next) { server.scan({}, function(err) { should.not.exist(err); server.getBalance(wallet.id, function(err, balance) { balance.totalAmount.should.equal(helpers.toSatoshi(6)); next(); }) }); }, ], function(err) { should.not.exist(err); done(); }); }); it('should abort scan if there is an error checking address activity', function(done) { blockchainExplorer.getAddressActivity = sinon.stub().callsArgWith(1, 'dummy error'); server.scan({}, function(err) { should.exist(err); err.toString().should.equal('dummy error'); server.getWallet({}, function(err, wallet) { should.not.exist(err); wallet.scanStatus.should.equal('error'); wallet.addressManager.receiveAddressIndex.should.equal(0); wallet.addressManager.changeAddressIndex.should.equal(0); server.storage.fetchAddresses(wallet.id, function(err, addresses) { should.not.exist(err); addresses.should.be.empty; done(); }); }); }); }); }); describe('shared wallet (BIP45)', function() { beforeEach(function(done) { this.timeout(5000); WalletService.SCAN_CONFIG.maxGap = 2; helpers.createAndJoinWallet(1, 2, { supportBIP44AndP2PKH: false }, function(s, w) { server = s; wallet = w; done(); }); }); afterEach(function() { WalletService.SCAN_CONFIG = scanConfigOld; }); it('should scan main addresses', function(done) { helpers.stubAddressActivity( ['39AA1Y2VvPJhV3RFbc7cKbUax1WgkPwweR', // m/2147483647/0/0 '3QX2MNSijnhCALBmUVnDo5UGPj3SEGASWx', // m/2147483647/0/2 '3MzGaz4KKX66w8ShKaR536ZqzVvREBqqYu', // m/2147483647/1/0 ]); var expectedPaths = [ 'm/2147483647/0/0', 'm/2147483647/0/1', 'm/2147483647/0/2', 'm/2147483647/1/0', ]; server.scan({}, function(err) { should.not.exist(err); server.getWallet({}, function(err, wallet) { should.not.exist(err); wallet.scanStatus.should.equal('success'); server.storage.fetchAddresses(wallet.id, function(err, addresses) { should.exist(addresses); addresses.length.should.equal(expectedPaths.length); var paths = _.pluck(addresses, 'path'); _.difference(paths, expectedPaths).length.should.equal(0); server.createAddress({}, function(err, address) { should.not.exist(err); address.path.should.equal('m/2147483647/0/3'); done(); }); }); }); }); }); it('should scan main addresses & copayer addresses', function(done) { helpers.stubAddressActivity( ['39AA1Y2VvPJhV3RFbc7cKbUax1WgkPwweR', // m/2147483647/0/0 '3MzGaz4KKX66w8ShKaR536ZqzVvREBqqYu', // m/2147483647/1/0 '3BYoynejwBH9q4Jhr9m9P5YTnLTu57US6g', // m/0/0/1 '37Pb8c32hzm16tCZaVHj4Dtjva45L2a3A3', // m/1/1/0 '32TB2n283YsXdseMqUm9zHSRcfS5JxTWxx', // m/1/0/0 ]); var expectedPaths = [ 'm/2147483647/0/0', 'm/2147483647/1/0', 'm/0/0/0', 'm/0/0/1', 'm/1/0/0', 'm/1/1/0', ]; server.scan({ includeCopayerBranches: true }, function(err) { should.not.exist(err); server.storage.fetchAddresses(wallet.id, function(err, addresses) { should.exist(addresses); addresses.length.should.equal(expectedPaths.length); var paths = _.pluck(addresses, 'path'); _.difference(paths, expectedPaths).length.should.equal(0); done(); }) }); }); }); }); describe('#startScan', function() { var server, wallet; var scanConfigOld = WalletService.SCAN_CONFIG; beforeEach(function(done) { this.timeout(5000); WalletService.SCAN_CONFIG.maxGap = 2; helpers.createAndJoinWallet(1, 1, { supportBIP44AndP2PKH: false }, function(s, w) { server = s; wallet = w; done(); }); }); afterEach(function() { WalletService.SCAN_CONFIG = 
scanConfigOld; server.messageBroker.removeAllListeners(); }); it('should start an asynchronous scan', function(done) { helpers.stubAddressActivity( ['3GvvHimEMk2GBZnPxTF89GHZL6QhZjUZVs', // m/2147483647/0/0 '37pd1jjTUiGBh8JL2hKLDgsyrhBoiz5vsi', // m/2147483647/0/2 '3C3tBn8Sr1wHTp2brMgYsj9ncB7R7paYuB', // m/2147483647/1/0 ]); var expectedPaths = [ 'm/2147483647/0/0', 'm/2147483647/0/1', 'm/2147483647/0/2', 'm/2147483647/1/0', ]; server.messageBroker.onMessage(function(n) { if (n.type == 'ScanFinished') { server.getWallet({}, function(err, wallet) { should.exist(wallet.scanStatus); wallet.scanStatus.should.equal('success'); should.not.exist(n.creatorId); server.storage.fetchAddresses(wallet.id, function(err, addresses) { should.exist(addresses); addresses.length.should.equal(expectedPaths.length); var paths = _.pluck(addresses, 'path'); _.difference(paths, expectedPaths).length.should.equal(0); server.createAddress({}, function(err, address) { should.not.exist(err); address.path.should.equal('m/2147483647/0/3'); done(); }); }) }); } }); server.startScan({}, function(err) { should.not.exist(err); }); }); it('should set scan status error when unable to reach blockchain', function(done) { blockchainExplorer.getAddressActivity = sinon.stub().yields('dummy error'); server.messageBroker.onMessage(function(n) { if (n.type == 'ScanFinished') { should.exist(n.data.error); server.getWallet({}, function(err, wallet) { should.exist(wallet.scanStatus); wallet.scanStatus.should.equal('error'); done(); }); } }); server.startScan({}, function(err) { should.not.exist(err); }); }); it('should start multiple asynchronous scans for different wallets', function(done) { helpers.stubAddressActivity(['3K2VWMXheGZ4qG35DyGjA2dLeKfaSr534A']); WalletService.SCAN_CONFIG.scanWindow = 1; var scans = 0; server.messageBroker.onMessage(function(n) { if (n.type == 'ScanFinished') { scans++; if (scans == 2) done(); } }); // Create a second wallet var server2 = new WalletService(); var opts = { name: 'second wallet', m: 1, n: 1, pubKey: TestData.keyPair.pub, }; server2.createWallet(opts, function(err, walletId) { should.not.exist(err); var copayerOpts = helpers.getSignedCopayerOpts({ walletId: walletId, name: 'copayer 1', xPubKey: TestData.copayers[3].xPubKey_45H, requestPubKey: TestData.copayers[3].pubKey_1H_0, }); server.joinWallet(copayerOpts, function(err, result) { should.not.exist(err); helpers.getAuthServer(result.copayerId, function(server2) { server.startScan({}, function(err) { should.not.exist(err); scans.should.equal(0); }); server2.startScan({}, function(err) { should.not.exist(err); scans.should.equal(0); }); scans.should.equal(0); }); }); }); }); }); describe('Legacy', function() { describe('Fees', function() { var server, wallet; beforeEach(function(done) { helpers.createAndJoinWallet(2, 3, function(s, w) { server = s; wallet = w; done(); }); }); it('should create a tx from legacy (bwc-0.0.*) client', function(done) { helpers.stubUtxos(server, wallet, [100, 200], function() { var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 80, TestData.copayers[0].privKey_1H_0, { message: 'some message' }); var verifyStub = sinon.stub(WalletService.prototype, '_verifySignature'); verifyStub.returns(true); WalletService.getInstanceWithAuth({ copayerId: wallet.copayers[0].id, message: 'dummy', signature: 'dummy', clientVersion: 'bwc-0.0.40', }, function(err, server) { should.not.exist(err); should.exist(server); verifyStub.restore(); server.createTx(txOpts, function(err, tx) { 
should.not.exist(err); should.exist(tx); tx.amount.should.equal(helpers.toSatoshi(80)); tx.fee.should.equal(WalletUtils.DEFAULT_FEE_PER_KB); done(); }); }); }); }); it('should not return error when fetching new txps from legacy (bwc-0.0.*) client', function(done) { helpers.stubUtxos(server, wallet, [100, 200], function() { var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 80, TestData.copayers[0].privKey_1H_0, { message: 'some message' }); server.createTx(txOpts, function(err, tx) { should.not.exist(err); should.exist(tx); var verifyStub = sinon.stub(WalletService.prototype, '_verifySignature'); verifyStub.returns(true); WalletService.getInstanceWithAuth({ copayerId: wallet.copayers[0].id, message: 'dummy', signature: 'dummy', clientVersion: 'bwc-0.0.40', }, function(err, server) { verifyStub.restore(); should.not.exist(err); should.exist(server); server.getPendingTxs({}, function(err, txps) { should.not.exist(err); should.exist(txps); done(); }); }); }); }); }); it('should fail to sign tx from legacy (bwc-0.0.*) client', function(done) { helpers.stubUtxos(server, wallet, [100, 200], function() { var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 80, TestData.copayers[0].privKey_1H_0, { message: 'some message' }); server.createTx(txOpts, function(err, tx) { should.not.exist(err); should.exist(tx); _.startsWith(tx.version, '1.').should.be.false; var verifyStub = sinon.stub(WalletService.prototype, '_verifySignature'); verifyStub.returns(true); WalletService.getInstanceWithAuth({ copayerId: wallet.copayers[0].id, message: 'dummy', signature: 'dummy', clientVersion: 'bwc-0.0.40', }, function(err, server) { var signatures = helpers.clientSign(tx, TestData.copayers[0].xPrivKey); server.signTx({ txProposalId: tx.id, signatures: signatures, }, function(err) { verifyStub.restore(); should.exist(err); err.code.should.equal('UPGRADE_NEEDED'); err.message.should.contain('sign this spend proposal'); done(); }); }); }); }); }); it('should create a tx from legacy (bwc-0.0.*) client and sign it from newer client', function(done) { helpers.stubUtxos(server, wallet, [100, 200], function() { var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 80, TestData.copayers[0].privKey_1H_0, { message: 'some message' }); var verifyStub = sinon.stub(WalletService.prototype, '_verifySignature'); verifyStub.returns(true); WalletService.getInstanceWithAuth({ copayerId: wallet.copayers[0].id, message: 'dummy', signature: 'dummy', clientVersion: 'bwc-0.0.40', }, function(err, server) { should.not.exist(err); should.exist(server); verifyStub.restore(); server.createTx(txOpts, function(err, tx) { should.not.exist(err); should.exist(tx); tx.amount.should.equal(helpers.toSatoshi(80)); tx.fee.should.equal(WalletUtils.DEFAULT_FEE_PER_KB); helpers.getAuthServer(wallet.copayers[0].id, function(server) { var signatures = helpers.clientSign(tx, TestData.copayers[0].xPrivKey); server.signTx({ txProposalId: tx.id, signatures: signatures, }, function(err) { should.not.exist(err); done(); }); }); }); }); }); }); it('should fail with insufficient fee when invoked from legacy (bwc-0.0.*) client', function(done) { helpers.stubUtxos(server, wallet, 1, function() { var verifyStub = sinon.stub(WalletService.prototype, '_verifySignature'); verifyStub.returns(true); WalletService.getInstanceWithAuth({ copayerId: wallet.copayers[0].id, message: 'dummy', signature: 'dummy', clientVersion: 'bwc-0.0.40', }, function(err, server) { 
should.not.exist(err); should.exist(server); verifyStub.restore(); var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 0.99995, TestData.copayers[0].privKey_1H_0); server.createTx(txOpts, function(err, tx) { should.exist(err); err.code.should.equal('INSUFFICIENT_FUNDS_FOR_FEE'); var txOpts = helpers.createSimpleProposalOpts('18PzpUFkFZE8zKWUPvfykkTxmB9oMR8qP7', 0.99995, TestData.copayers[0].privKey_1H_0, { feePerKb: 5000 }); server.createTx(txOpts, function(err, tx) { should.not.exist(err); tx.fee.should.equal(5000); // Sign it to make sure Bitcore doesn't complain about the fees var signatures = helpers.clientSign(tx, TestData.copayers[0].xPrivKey); server.signTx({ txProposalId: tx.id, signatures: signatures, }, function(err) { should.not.exist(err); done(); }); }); }); }); }); }); }); }); });
test existing addresses do not get rewinded on scan error
test/integration/server.js
test existing addresses do not get rewinded on scan error
<ide><path>test/integration/server.js
<ide> });
<ide> });
<ide>
<add> it('should not rewind already generated addresses on error', function(done) {
<add> server.createAddress({}, function(err, address) {
<add> should.not.exist(err);
<add> address.path.should.equal('m/0/0');
<add> blockchainExplorer.getAddressActivity = sinon.stub().callsArgWith(1, 'dummy error');
<add> server.scan({}, function(err) {
<add> should.exist(err);
<add> err.toString().should.equal('dummy error');
<add> server.getWallet({}, function(err, wallet) {
<add> should.not.exist(err);
<add> wallet.scanStatus.should.equal('error');
<add> wallet.addressManager.receiveAddressIndex.should.equal(1);
<add> wallet.addressManager.changeAddressIndex.should.equal(0);
<add> server.createAddress({}, function(err, address) {
<add> should.not.exist(err);
<add> address.path.should.equal('m/0/1');
<add> done();
<add> });
<add> });
<add> });
<add> });
<add> });
<add>
<ide> it('should restore wallet balance', function(done) {
<ide> async.waterfall([
<ide>
Java
apache-2.0
3e7bf0fa20312acd476bfdfba45d4f828c5a1353
0
qwerty4030/elasticsearch,elancom/elasticsearch,anti-social/elasticsearch,Fsero/elasticsearch,fekaputra/elasticsearch,mbrukman/elasticsearch,fubuki/elasticsearch,libosu/elasticsearch,wittyameta/elasticsearch,mrorii/elasticsearch,milodky/elasticsearch,rmuir/elasticsearch,apepper/elasticsearch,Liziyao/elasticsearch,vrkansagara/elasticsearch,lightslife/elasticsearch,mkis-/elasticsearch,bestwpw/elasticsearch,gfyoung/elasticsearch,coding0011/elasticsearch,zhaocloud/elasticsearch,kcompher/elasticsearch,overcome/elasticsearch,djschny/elasticsearch,socialrank/elasticsearch,MichaelLiZhou/elasticsearch,kalburgimanjunath/elasticsearch,njlawton/elasticsearch,huypx1292/elasticsearch,springning/elasticsearch,hanst/elasticsearch,yongminxia/elasticsearch,Uiho/elasticsearch,s1monw/elasticsearch,onegambler/elasticsearch,petabytedata/elasticsearch,jeteve/elasticsearch,pritishppai/elasticsearch,geidies/elasticsearch,diendt/elasticsearch,NBSW/elasticsearch,alexbrasetvik/elasticsearch,glefloch/elasticsearch,spiegela/elasticsearch,martinstuga/elasticsearch,tkssharma/elasticsearch,lchennup/elasticsearch,lmenezes/elasticsearch,likaiwalkman/elasticsearch,EasonYi/elasticsearch,overcome/elasticsearch,girirajsharma/elasticsearch,drewr/elasticsearch,mikemccand/elasticsearch,Asimov4/elasticsearch,combinatorist/elasticsearch,girirajsharma/elasticsearch,jw0201/elastic,jpountz/elasticsearch,sreeramjayan/elasticsearch,opendatasoft/elasticsearch,aparo/elasticsearch,LeoYao/elasticsearch,henakamaMSFT/elasticsearch,ajhalani/elasticsearch,wimvds/elasticsearch,ricardocerq/elasticsearch,Charlesdong/elasticsearch,khiraiwa/elasticsearch,dongjoon-hyun/elasticsearch,jchampion/elasticsearch,huanzhong/elasticsearch,ajhalani/elasticsearch,strapdata/elassandra5-rc,brandonkearby/elasticsearch,liweinan0423/elasticsearch,szroland/elasticsearch,lks21c/elasticsearch,opendatasoft/elasticsearch,mcku/elasticsearch,sscarduzio/elasticsearch,kubum/elasticsearch,kcompher/elasticsearch,sposam/elasticsearch,kunallimaye/elasticsearch,ydsakyclguozi/elasticsearch,hydro2k/elasticsearch,sauravmondallive/elasticsearch,sdauletau/elasticsearch,lzo/elasticsearch-1,lchennup/elasticsearch,njlawton/elasticsearch,gmarz/elasticsearch,palecur/elasticsearch,djschny/elasticsearch,aglne/elasticsearch,Rygbee/elasticsearch,strapdata/elassandra5-rc,boliza/elasticsearch,davidvgalbraith/elasticsearch,uschindler/elasticsearch,mnylen/elasticsearch,dongjoon-hyun/elasticsearch,dataduke/elasticsearch,ulkas/elasticsearch,onegambler/elasticsearch,javachengwc/elasticsearch,zkidkid/elasticsearch,spiegela/elasticsearch,GlenRSmith/elasticsearch,rlugojr/elasticsearch,MaineC/elasticsearch,tkssharma/elasticsearch,MichaelLiZhou/elasticsearch,mute/elasticsearch,fred84/elasticsearch,mbrukman/elasticsearch,iantruslove/elasticsearch,ThalaivaStars/OrgRepo1,mjhennig/elasticsearch,socialrank/elasticsearch,hafkensite/elasticsearch,franklanganke/elasticsearch,fubuki/elasticsearch,polyfractal/elasticsearch,knight1128/elasticsearch,JackyMai/elasticsearch,djschny/elasticsearch,ouyangkongtong/elasticsearch,abhijitiitr/es,Helen-Zhao/elasticsearch,JervyShi/elasticsearch,ZTE-PaaS/elasticsearch,MisterAndersen/elasticsearch,acchen97/elasticsearch,AshishThakur/elasticsearch,markllama/elasticsearch,ajhalani/elasticsearch,lchennup/elasticsearch,F0lha/elasticsearch,SaiprasadKrishnamurthy/elasticsearch,F0lha/elasticsearch,Collaborne/elasticsearch,18098924759/elasticsearch,Clairebi/ElasticsearchClone,golubev/elasticsearch,knight1128/elasticsearch,Microsoft/elasticsearch,beiske/elasticsearch,davidvgalbraith/elastics
earch,hirdesh2008/elasticsearch,myelin/elasticsearch,jimczi/elasticsearch,brandonkearby/elasticsearch,petabytedata/elasticsearch,nomoa/elasticsearch,feiqitian/elasticsearch,robin13/elasticsearch,maddin2016/elasticsearch,andrestc/elasticsearch,Flipkart/elasticsearch,gingerwizard/elasticsearch,opendatasoft/elasticsearch,rento19962/elasticsearch,dpursehouse/elasticsearch,Uiho/elasticsearch,Clairebi/ElasticsearchClone,PhaedrusTheGreek/elasticsearch,jaynblue/elasticsearch,markllama/elasticsearch,ivansun1010/elasticsearch,MetSystem/elasticsearch,zhiqinghuang/elasticsearch,knight1128/elasticsearch,zhiqinghuang/elasticsearch,Clairebi/ElasticsearchClone,Charlesdong/elasticsearch,s1monw/elasticsearch,PhaedrusTheGreek/elasticsearch,trangvh/elasticsearch,wittyameta/elasticsearch,dataduke/elasticsearch,strapdata/elassandra,jbertouch/elasticsearch,myelin/elasticsearch,jeteve/elasticsearch,hechunwen/elasticsearch,nezirus/elasticsearch,jbertouch/elasticsearch,rhoml/elasticsearch,karthikjaps/elasticsearch,ricardocerq/elasticsearch,tsohil/elasticsearch,Liziyao/elasticsearch,mcku/elasticsearch,sneivandt/elasticsearch,kunallimaye/elasticsearch,sjohnr/elasticsearch,awislowski/elasticsearch,golubev/elasticsearch,chirilo/elasticsearch,MaineC/elasticsearch,ckclark/elasticsearch,adrianbk/elasticsearch,kingaj/elasticsearch,Fsero/elasticsearch,scottsom/elasticsearch,s1monw/elasticsearch,tkssharma/elasticsearch,szroland/elasticsearch,Shekharrajak/elasticsearch,alexbrasetvik/elasticsearch,feiqitian/elasticsearch,martinstuga/elasticsearch,avikurapati/elasticsearch,LewayneNaidoo/elasticsearch,lmtwga/elasticsearch,kalburgimanjunath/elasticsearch,milodky/elasticsearch,KimTaehee/elasticsearch,NBSW/elasticsearch,markllama/elasticsearch,luiseduardohdbackup/elasticsearch,humandb/elasticsearch,zkidkid/elasticsearch,dylan8902/elasticsearch,Charlesdong/elasticsearch,Uiho/elasticsearch,mohsinh/elasticsearch,vorce/es-metrics,yuy168/elasticsearch,caengcjd/elasticsearch,F0lha/elasticsearch,amaliujia/elasticsearch,marcuswr/elasticsearch-dateline,hafkensite/elasticsearch,episerver/elasticsearch,amit-shar/elasticsearch,coding0011/elasticsearch,scottsom/elasticsearch,kenshin233/elasticsearch,kubum/elasticsearch,rento19962/elasticsearch,amaliujia/elasticsearch,lchennup/elasticsearch,sjohnr/elasticsearch,VukDukic/elasticsearch,naveenhooda2000/elasticsearch,overcome/elasticsearch,golubev/elasticsearch,wimvds/elasticsearch,kimchy/elasticsearch,Chhunlong/elasticsearch,luiseduardohdbackup/elasticsearch,codebunt/elasticsearch,kimchy/elasticsearch,marcuswr/elasticsearch-dateline,Brijeshrpatel9/elasticsearch,Shekharrajak/elasticsearch,wayeast/elasticsearch,i-am-Nathan/elasticsearch,khiraiwa/elasticsearch,hafkensite/elasticsearch,micpalmia/elasticsearch,lks21c/elasticsearch,schonfeld/elasticsearch,andrewvc/elasticsearch,AshishThakur/elasticsearch,Rygbee/elasticsearch,pritishppai/elasticsearch,masaruh/elasticsearch,NBSW/elasticsearch,sarwarbhuiyan/elasticsearch,acchen97/elasticsearch,JackyMai/elasticsearch,wimvds/elasticsearch,sarwarbhuiyan/elasticsearch,petabytedata/elasticsearch,slavau/elasticsearch,yynil/elasticsearch,sposam/elasticsearch,kcompher/elasticsearch,lydonchandra/elasticsearch,yanjunh/elasticsearch,kevinkluge/elasticsearch,mgalushka/elasticsearch,AshishThakur/elasticsearch,knight1128/elasticsearch,anti-social/elasticsearch,ouyangkongtong/elasticsearch,abhijitiitr/es,StefanGor/elasticsearch,jimhooker2002/elasticsearch,camilojd/elasticsearch,salyh/elasticsearch,strapdata/elassandra-test,zhaocloud/elasticsearch,rajanm/elasticsearch,vvcep
hei/elasticsearch,mgalushka/elasticsearch,masaruh/elasticsearch,camilojd/elasticsearch,aparo/elasticsearch,sdauletau/elasticsearch,PhaedrusTheGreek/elasticsearch,phani546/elasticsearch,cnfire/elasticsearch-1,overcome/elasticsearch,camilojd/elasticsearch,i-am-Nathan/elasticsearch,StefanGor/elasticsearch,jchampion/elasticsearch,loconsolutions/elasticsearch,vroyer/elasticassandra,Liziyao/elasticsearch,davidvgalbraith/elasticsearch,rlugojr/elasticsearch,truemped/elasticsearch,jbertouch/elasticsearch,huanzhong/elasticsearch,Fsero/elasticsearch,easonC/elasticsearch,iacdingping/elasticsearch,markharwood/elasticsearch,18098924759/elasticsearch,codebunt/elasticsearch,mbrukman/elasticsearch,wuranbo/elasticsearch,sneivandt/elasticsearch,kubum/elasticsearch,diendt/elasticsearch,avikurapati/elasticsearch,vrkansagara/elasticsearch,wittyameta/elasticsearch,HarishAtGitHub/elasticsearch,ThiagoGarciaAlves/elasticsearch,fernandozhu/elasticsearch,aglne/elasticsearch,schonfeld/elasticsearch,masterweb121/elasticsearch,wbowling/elasticsearch,wenpos/elasticsearch,ThiagoGarciaAlves/elasticsearch,wangtuo/elasticsearch,fooljohnny/elasticsearch,vorce/es-metrics,Charlesdong/elasticsearch,iamjakob/elasticsearch,apepper/elasticsearch,jpountz/elasticsearch,baishuo/elasticsearch_v2.1.0-baishuo,Kakakakakku/elasticsearch,avikurapati/elasticsearch,henakamaMSFT/elasticsearch,rhoml/elasticsearch,drewr/elasticsearch,Brijeshrpatel9/elasticsearch,vrkansagara/elasticsearch,KimTaehee/elasticsearch,scorpionvicky/elasticsearch,rajanm/elasticsearch,sscarduzio/elasticsearch,jaynblue/elasticsearch,vinsonlou/elasticsearch,yuy168/elasticsearch,Ansh90/elasticsearch,qwerty4030/elasticsearch,truemped/elasticsearch,nrkkalyan/elasticsearch,loconsolutions/elasticsearch,MaineC/elasticsearch,jbertouch/elasticsearch,mnylen/elasticsearch,clintongormley/elasticsearch,MjAbuz/elasticsearch,hydro2k/elasticsearch,micpalmia/elasticsearch,himanshuag/elasticsearch,lightslife/elasticsearch,jchampion/elasticsearch,JervyShi/elasticsearch,zhaocloud/elasticsearch,rhoml/elasticsearch,zhiqinghuang/elasticsearch,mnylen/elasticsearch,andrestc/elasticsearch,avikurapati/elasticsearch,snikch/elasticsearch,iamjakob/elasticsearch,jprante/elasticsearch,andrejserafim/elasticsearch,sarwarbhuiyan/elasticsearch,zhiqinghuang/elasticsearch,C-Bish/elasticsearch,anti-social/elasticsearch,martinstuga/elasticsearch,MisterAndersen/elasticsearch,maddin2016/elasticsearch,ajhalani/elasticsearch,caengcjd/elasticsearch,mikemccand/elasticsearch,phani546/elasticsearch,fernandozhu/elasticsearch,cnfire/elasticsearch-1,thecocce/elasticsearch,chirilo/elasticsearch,vingupta3/elasticsearch,sscarduzio/elasticsearch,winstonewert/elasticsearch,wangtuo/elasticsearch,mjason3/elasticsearch,ThalaivaStars/OrgRepo1,rmuir/elasticsearch,yongminxia/elasticsearch,smflorentino/elasticsearch,YosuaMichael/elasticsearch,snikch/elasticsearch,xingguang2013/elasticsearch,elancom/elasticsearch,Brijeshrpatel9/elasticsearch,socialrank/elasticsearch,mjhennig/elasticsearch,nrkkalyan/elasticsearch,AndreKR/elasticsearch,mnylen/elasticsearch,micpalmia/elasticsearch,Kakakakakku/elasticsearch,baishuo/elasticsearch_v2.1.0-baishuo,gingerwizard/elasticsearch,jeteve/elasticsearch,mcku/elasticsearch,ckclark/elasticsearch,sneivandt/elasticsearch,ricardocerq/elasticsearch,liweinan0423/elasticsearch,kimimj/elasticsearch,scorpionvicky/elasticsearch,feiqitian/elasticsearch,baishuo/elasticsearch_v2.1.0-baishuo,skearns64/elasticsearch,humandb/elasticsearch,mohsinh/elasticsearch,sjohnr/elasticsearch,kalimatas/elasticsearch,Rygbee/elastic
search,masterweb121/elasticsearch,nrkkalyan/elasticsearch,mgalushka/elasticsearch,markharwood/elasticsearch,javachengwc/elasticsearch,lightslife/elasticsearch,umeshdangat/elasticsearch,masterweb121/elasticsearch,boliza/elasticsearch,caengcjd/elasticsearch,Shekharrajak/elasticsearch,jango2015/elasticsearch,raishiv/elasticsearch,knight1128/elasticsearch,huypx1292/elasticsearch,mgalushka/elasticsearch,pritishppai/elasticsearch,wangtuo/elasticsearch,Chhunlong/elasticsearch,markharwood/elasticsearch,combinatorist/elasticsearch,lzo/elasticsearch-1,LeoYao/elasticsearch,lks21c/elasticsearch,salyh/elasticsearch,ESamir/elasticsearch,henakamaMSFT/elasticsearch,kubum/elasticsearch,opendatasoft/elasticsearch,socialrank/elasticsearch,areek/elasticsearch,AleksKochev/elasticsearch,fekaputra/elasticsearch,linglaiyao1314/elasticsearch,apepper/elasticsearch,ulkas/elasticsearch,MichaelLiZhou/elasticsearch,tkssharma/elasticsearch,Collaborne/elasticsearch,Charlesdong/elasticsearch,Widen/elasticsearch,ImpressTV/elasticsearch,Microsoft/elasticsearch,amit-shar/elasticsearch,Brijeshrpatel9/elasticsearch,polyfractal/elasticsearch,mortonsykes/elasticsearch,shreejay/elasticsearch,peschlowp/elasticsearch,nknize/elasticsearch,overcome/elasticsearch,ThiagoGarciaAlves/elasticsearch,pablocastro/elasticsearch,schonfeld/elasticsearch,dpursehouse/elasticsearch,Chhunlong/elasticsearch,Brijeshrpatel9/elasticsearch,skearns64/elasticsearch,rento19962/elasticsearch,khiraiwa/elasticsearch,xpandan/elasticsearch,xpandan/elasticsearch,zhaocloud/elasticsearch,aparo/elasticsearch,tebriel/elasticsearch,strapdata/elassandra-test,Siddartha07/elasticsearch,ThiagoGarciaAlves/elasticsearch,weipinghe/elasticsearch,petmit/elasticsearch,jimczi/elasticsearch,andrejserafim/elasticsearch,aglne/elasticsearch,wayeast/elasticsearch,alexshadow007/elasticsearch,sauravmondallive/elasticsearch,diendt/elasticsearch,kubum/elasticsearch,Asimov4/elasticsearch,obourgain/elasticsearch,Uiho/elasticsearch,iantruslove/elasticsearch,dataduke/elasticsearch,clintongormley/elasticsearch,coding0011/elasticsearch,hanst/elasticsearch,MjAbuz/elasticsearch,andrestc/elasticsearch,overcome/elasticsearch,lmtwga/elasticsearch,wimvds/elasticsearch,mrorii/elasticsearch,IanvsPoplicola/elasticsearch,tcucchietti/elasticsearch,ckclark/elasticsearch,ydsakyclguozi/elasticsearch,abhijitiitr/es,Collaborne/elasticsearch,mbrukman/elasticsearch,F0lha/elasticsearch,sauravmondallive/elasticsearch,jpountz/elasticsearch,andrestc/elasticsearch,alexbrasetvik/elasticsearch,IanvsPoplicola/elasticsearch,kkirsche/elasticsearch,codebunt/elasticsearch,nrkkalyan/elasticsearch,elancom/elasticsearch,GlenRSmith/elasticsearch,khiraiwa/elasticsearch,qwerty4030/elasticsearch,fforbeck/elasticsearch,ESamir/elasticsearch,nilabhsagar/elasticsearch,abibell/elasticsearch,markllama/elasticsearch,MjAbuz/elasticsearch,jimhooker2002/elasticsearch,ouyangkongtong/elasticsearch,C-Bish/elasticsearch,vietlq/elasticsearch,vorce/es-metrics,SaiprasadKrishnamurthy/elasticsearch,mm0/elasticsearch,hydro2k/elasticsearch,lmtwga/elasticsearch,ivansun1010/elasticsearch,elasticdog/elasticsearch,loconsolutions/elasticsearch,F0lha/elasticsearch,kenshin233/elasticsearch,luiseduardohdbackup/elasticsearch,pritishppai/elasticsearch,kalburgimanjunath/elasticsearch,martinstuga/elasticsearch,likaiwalkman/elasticsearch,alexshadow007/elasticsearch,mgalushka/elasticsearch,mjhennig/elasticsearch,gingerwizard/elasticsearch,springning/elasticsearch,glefloch/elasticsearch,humandb/elasticsearch,cwurm/elasticsearch,ckclark/elasticsearch,nomoa/elasticse
arch,Fsero/elasticsearch,Shekharrajak/elasticsearch,sdauletau/elasticsearch,mmaracic/elasticsearch,thecocce/elasticsearch,beiske/elasticsearch,ZTE-PaaS/elasticsearch,Liziyao/elasticsearch,tahaemin/elasticsearch,AndreKR/elasticsearch,kingaj/elasticsearch,heng4fun/elasticsearch,ydsakyclguozi/elasticsearch,hechunwen/elasticsearch,iacdingping/elasticsearch,JackyMai/elasticsearch,humandb/elasticsearch,Rygbee/elasticsearch,apepper/elasticsearch,hirdesh2008/elasticsearch,pozhidaevak/elasticsearch,masterweb121/elasticsearch,Asimov4/elasticsearch,pozhidaevak/elasticsearch,KimTaehee/elasticsearch,aparo/elasticsearch,HonzaKral/elasticsearch,mkis-/elasticsearch,obourgain/elasticsearch,fekaputra/elasticsearch,ESamir/elasticsearch,fekaputra/elasticsearch,lydonchandra/elasticsearch,kkirsche/elasticsearch,robin13/elasticsearch,socialrank/elasticsearch,mjason3/elasticsearch,dpursehouse/elasticsearch,dongjoon-hyun/elasticsearch,JackyMai/elasticsearch,fernandozhu/elasticsearch,ThalaivaStars/OrgRepo1,infusionsoft/elasticsearch,himanshuag/elasticsearch,hechunwen/elasticsearch,jeteve/elasticsearch,kkirsche/elasticsearch,vietlq/elasticsearch,dylan8902/elasticsearch,likaiwalkman/elasticsearch,wimvds/elasticsearch,btiernay/elasticsearch,onegambler/elasticsearch,aparo/elasticsearch,jsgao0/elasticsearch,jeteve/elasticsearch,jango2015/elasticsearch,pozhidaevak/elasticsearch,fred84/elasticsearch,jango2015/elasticsearch,wangyuxue/elasticsearch,opendatasoft/elasticsearch,sdauletau/elasticsearch,wenpos/elasticsearch,clintongormley/elasticsearch,achow/elasticsearch,andrejserafim/elasticsearch,markharwood/elasticsearch,fekaputra/elasticsearch,camilojd/elasticsearch,Siddartha07/elasticsearch,JSCooke/elasticsearch,alexksikes/elasticsearch,HarishAtGitHub/elasticsearch,acchen97/elasticsearch,mbrukman/elasticsearch,kevinkluge/elasticsearch,clintongormley/elasticsearch,hechunwen/elasticsearch,mute/elasticsearch,dylan8902/elasticsearch,kingaj/elasticsearch,tebriel/elasticsearch,mohit/elasticsearch,HonzaKral/elasticsearch,karthikjaps/elasticsearch,hechunwen/elasticsearch,robin13/elasticsearch,mikemccand/elasticsearch,winstonewert/elasticsearch,uschindler/elasticsearch,rajanm/elasticsearch,markharwood/elasticsearch,zeroctu/elasticsearch,achow/elasticsearch,jsgao0/elasticsearch,a2lin/elasticsearch,mkis-/elasticsearch,Stacey-Gammon/elasticsearch,kevinkluge/elasticsearch,heng4fun/elasticsearch,luiseduardohdbackup/elasticsearch,dylan8902/elasticsearch,vingupta3/elasticsearch,winstonewert/elasticsearch,petabytedata/elasticsearch,milodky/elasticsearch,palecur/elasticsearch,nazarewk/elasticsearch,strapdata/elassandra,mgalushka/elasticsearch,henakamaMSFT/elasticsearch,lchennup/elasticsearch,18098924759/elasticsearch,mohsinh/elasticsearch,anti-social/elasticsearch,anti-social/elasticsearch,scorpionvicky/elasticsearch,HarishAtGitHub/elasticsearch,golubev/elasticsearch,codebunt/elasticsearch,Ansh90/elasticsearch,libosu/elasticsearch,fernandozhu/elasticsearch,yynil/elasticsearch,glefloch/elasticsearch,mikemccand/elasticsearch,kalimatas/elasticsearch,nellicus/elasticsearch,dantuffery/elasticsearch,phani546/elasticsearch,chirilo/elasticsearch,maddin2016/elasticsearch,cwurm/elasticsearch,linglaiyao1314/elasticsearch,areek/elasticsearch,truemped/elasticsearch,himanshuag/elasticsearch,slavau/elasticsearch,polyfractal/elasticsearch,zeroctu/elasticsearch,vorce/es-metrics,salyh/elasticsearch,obourgain/elasticsearch,geidies/elasticsearch,zhaocloud/elasticsearch,AleksKochev/elasticsearch,amaliujia/elasticsearch,aglne/elasticsearch,mcku/elasticsearch,tsoh
il/elasticsearch,umeshdangat/elasticsearch,TonyChai24/ESSource,nellicus/elasticsearch,markwalkom/elasticsearch,queirozfcom/elasticsearch,hydro2k/elasticsearch,bawse/elasticsearch,SergVro/elasticsearch,schonfeld/elasticsearch,YosuaMichael/elasticsearch,njlawton/elasticsearch,JSCooke/elasticsearch,girirajsharma/elasticsearch,bawse/elasticsearch,dantuffery/elasticsearch,Flipkart/elasticsearch,Clairebi/ElasticsearchClone,iacdingping/elasticsearch,kcompher/elasticsearch,petabytedata/elasticsearch,scottsom/elasticsearch,alexkuk/elasticsearch,JervyShi/elasticsearch,raishiv/elasticsearch,lmtwga/elasticsearch,libosu/elasticsearch,raishiv/elasticsearch,zeroctu/elasticsearch,jimhooker2002/elasticsearch,fred84/elasticsearch,snikch/elasticsearch,myelin/elasticsearch,vrkansagara/elasticsearch,jango2015/elasticsearch,liweinan0423/elasticsearch,onegambler/elasticsearch,iacdingping/elasticsearch,jpountz/elasticsearch,Clairebi/ElasticsearchClone,jw0201/elastic,AndreKR/elasticsearch,tkssharma/elasticsearch,cnfire/elasticsearch-1,aparo/elasticsearch,pablocastro/elasticsearch,vietlq/elasticsearch,ImpressTV/elasticsearch,a2lin/elasticsearch,ulkas/elasticsearch,mapr/elasticsearch,IanvsPoplicola/elasticsearch,C-Bish/elasticsearch,JervyShi/elasticsearch,LewayneNaidoo/elasticsearch,AshishThakur/elasticsearch,bawse/elasticsearch,zkidkid/elasticsearch,onegambler/elasticsearch,kingaj/elasticsearch,humandb/elasticsearch,nazarewk/elasticsearch,humandb/elasticsearch,truemped/elasticsearch,dongaihua/highlight-elasticsearch,wangtuo/elasticsearch,kcompher/elasticsearch,szroland/elasticsearch,pritishppai/elasticsearch,javachengwc/elasticsearch,glefloch/elasticsearch,SaiprasadKrishnamurthy/elasticsearch,amit-shar/elasticsearch,brandonkearby/elasticsearch,Chhunlong/elasticsearch,btiernay/elasticsearch,Shekharrajak/elasticsearch,himanshuag/elasticsearch,Collaborne/elasticsearch,iacdingping/elasticsearch,knight1128/elasticsearch,zhiqinghuang/elasticsearch,geidies/elasticsearch,ESamir/elasticsearch,uschindler/elasticsearch,Rygbee/elasticsearch,abibell/elasticsearch,Uiho/elasticsearch,yuy168/elasticsearch,knight1128/elasticsearch,sc0ttkclark/elasticsearch,StefanGor/elasticsearch,easonC/elasticsearch,ThiagoGarciaAlves/elasticsearch,ajhalani/elasticsearch,nezirus/elasticsearch,umeshdangat/elasticsearch,truemped/elasticsearch,nellicus/elasticsearch,jpountz/elasticsearch,mute/elasticsearch,micpalmia/elasticsearch,tahaemin/elasticsearch,dataduke/elasticsearch,bestwpw/elasticsearch,artnowo/elasticsearch,vietlq/elasticsearch,easonC/elasticsearch,fforbeck/elasticsearch,polyfractal/elasticsearch,xuzha/elasticsearch,truemped/elasticsearch,MjAbuz/elasticsearch,masterweb121/elasticsearch,apepper/elasticsearch,dantuffery/elasticsearch,springning/elasticsearch,mm0/elasticsearch,kubum/elasticsearch,vrkansagara/elasticsearch,infusionsoft/elasticsearch,vrkansagara/elasticsearch,beiske/elasticsearch,adrianbk/elasticsearch,janmejay/elasticsearch,raishiv/elasticsearch,cnfire/elasticsearch-1,fforbeck/elasticsearch,artnowo/elasticsearch,shreejay/elasticsearch,naveenhooda2000/elasticsearch,tcucchietti/elasticsearch,rajanm/elasticsearch,jimczi/elasticsearch,linglaiyao1314/elasticsearch,mbrukman/elasticsearch,vinsonlou/elasticsearch,coding0011/elasticsearch,episerver/elasticsearch,mcku/elasticsearch,andrestc/elasticsearch,Microsoft/elasticsearch,SaiprasadKrishnamurthy/elasticsearch,Widen/elasticsearch,btiernay/elasticsearch,pozhidaevak/elasticsearch,tahaemin/elasticsearch,weipinghe/elasticsearch,sreeramjayan/elasticsearch,MichaelLiZhou/elasticsearch,SergVr
o/elasticsearch,i-am-Nathan/elasticsearch,schonfeld/elasticsearch,polyfractal/elasticsearch,sjohnr/elasticsearch,sjohnr/elasticsearch,caengcjd/elasticsearch,elancom/elasticsearch,tebriel/elasticsearch,adrianbk/elasticsearch,jprante/elasticsearch,huanzhong/elasticsearch,kimimj/elasticsearch,mohit/elasticsearch,brandonkearby/elasticsearch,HarishAtGitHub/elasticsearch,markharwood/elasticsearch,kalburgimanjunath/elasticsearch,huanzhong/elasticsearch,winstonewert/elasticsearch,sposam/elasticsearch,MichaelLiZhou/elasticsearch,obourgain/elasticsearch,MjAbuz/elasticsearch,alexkuk/elasticsearch,jchampion/elasticsearch,jimhooker2002/elasticsearch,MisterAndersen/elasticsearch,Chhunlong/elasticsearch,abibell/elasticsearch,jango2015/elasticsearch,rmuir/elasticsearch,tcucchietti/elasticsearch,yongminxia/elasticsearch,vietlq/elasticsearch,sneivandt/elasticsearch,tsohil/elasticsearch,adrianbk/elasticsearch,uboness/elasticsearch,palecur/elasticsearch,avikurapati/elasticsearch,masaruh/elasticsearch,Kakakakakku/elasticsearch,mm0/elasticsearch,JackyMai/elasticsearch,bestwpw/elasticsearch,rlugojr/elasticsearch,mapr/elasticsearch,mm0/elasticsearch,mnylen/elasticsearch,Siddartha07/elasticsearch,koxa29/elasticsearch,SergVro/elasticsearch,hirdesh2008/elasticsearch,MjAbuz/elasticsearch,trangvh/elasticsearch,HonzaKral/elasticsearch,koxa29/elasticsearch,yanjunh/elasticsearch,clintongormley/elasticsearch,18098924759/elasticsearch,ulkas/elasticsearch,hechunwen/elasticsearch,hanst/elasticsearch,nknize/elasticsearch,wayeast/elasticsearch,Shepard1212/elasticsearch,naveenhooda2000/elasticsearch,alexksikes/elasticsearch,Kakakakakku/elasticsearch,Uiho/elasticsearch,djschny/elasticsearch,wittyameta/elasticsearch,pranavraman/elasticsearch,palecur/elasticsearch,peschlowp/elasticsearch,Kakakakakku/elasticsearch,mortonsykes/elasticsearch,xingguang2013/elasticsearch,NBSW/elasticsearch,LewayneNaidoo/elasticsearch,LewayneNaidoo/elasticsearch,episerver/elasticsearch,vingupta3/elasticsearch,hydro2k/elasticsearch,strapdata/elassandra5-rc,queirozfcom/elasticsearch,Liziyao/elasticsearch,martinstuga/elasticsearch,mrorii/elasticsearch,ydsakyclguozi/elasticsearch,cwurm/elasticsearch,AleksKochev/elasticsearch,F0lha/elasticsearch,myelin/elasticsearch,gfyoung/elasticsearch,Siddartha07/elasticsearch,peschlowp/elasticsearch,JSCooke/elasticsearch,alexshadow007/elasticsearch,uboness/elasticsearch,dylan8902/elasticsearch,tahaemin/elasticsearch,hydro2k/elasticsearch,truemped/elasticsearch,ouyangkongtong/elasticsearch,brwe/elasticsearch,AndreKR/elasticsearch,LeoYao/elasticsearch,sposam/elasticsearch,mrorii/elasticsearch,markwalkom/elasticsearch,mmaracic/elasticsearch,episerver/elasticsearch,sauravmondallive/elasticsearch,kaneshin/elasticsearch,MaineC/elasticsearch,marcuswr/elasticsearch-dateline,maddin2016/elasticsearch,uschindler/elasticsearch,xingguang2013/elasticsearch,diendt/elasticsearch,yanjunh/elasticsearch,fooljohnny/elasticsearch,jw0201/elastic,koxa29/elasticsearch,socialrank/elasticsearch,KimTaehee/elasticsearch,iantruslove/elasticsearch,queirozfcom/elasticsearch,kalimatas/elasticsearch,sneivandt/elasticsearch,springning/elasticsearch,kingaj/elasticsearch,chirilo/elasticsearch,linglaiyao1314/elasticsearch,wbowling/elasticsearch,Chhunlong/elasticsearch,likaiwalkman/elasticsearch,nellicus/elasticsearch,jaynblue/elasticsearch,davidvgalbraith/elasticsearch,uboness/elasticsearch,bawse/elasticsearch,skearns64/elasticsearch,strapdata/elassandra-test,jw0201/elastic,abibell/elasticsearch,petabytedata/elasticsearch,linglaiyao1314/elasticsearch,chrismw
endt/elasticsearch,skearns64/elasticsearch,dantuffery/elasticsearch,drewr/elasticsearch,nezirus/elasticsearch,iantruslove/elasticsearch,wayeast/elasticsearch,javachengwc/elasticsearch,kcompher/elasticsearch,ouyangkongtong/elasticsearch,wayeast/elasticsearch,yuy168/elasticsearch,djschny/elasticsearch,opendatasoft/elasticsearch,kubum/elasticsearch,diendt/elasticsearch,mapr/elasticsearch,smflorentino/elasticsearch,lightslife/elasticsearch,mmaracic/elasticsearch,sjohnr/elasticsearch,iacdingping/elasticsearch,ImpressTV/elasticsearch,HarishAtGitHub/elasticsearch,NBSW/elasticsearch,elasticdog/elasticsearch,lks21c/elasticsearch,PhaedrusTheGreek/elasticsearch,njlawton/elasticsearch,liweinan0423/elasticsearch,hydro2k/elasticsearch,petmit/elasticsearch,Charlesdong/elasticsearch,alexkuk/elasticsearch,lightslife/elasticsearch,achow/elasticsearch,huanzhong/elasticsearch,sscarduzio/elasticsearch,hanst/elasticsearch,pozhidaevak/elasticsearch,mjhennig/elasticsearch,fred84/elasticsearch,rmuir/elasticsearch,springning/elasticsearch,Flipkart/elasticsearch,nknize/elasticsearch,nazarewk/elasticsearch,dataduke/elasticsearch,ckclark/elasticsearch,achow/elasticsearch,Siddartha07/elasticsearch,cnfire/elasticsearch-1,easonC/elasticsearch,iamjakob/elasticsearch,andrewvc/elasticsearch,baishuo/elasticsearch_v2.1.0-baishuo,Shepard1212/elasticsearch,brwe/elasticsearch,xpandan/elasticsearch,elancom/elasticsearch,rlugojr/elasticsearch,khiraiwa/elasticsearch,franklanganke/elasticsearch,karthikjaps/elasticsearch,sdauletau/elasticsearch,mmaracic/elasticsearch,feiqitian/elasticsearch,vingupta3/elasticsearch,andrestc/elasticsearch,TonyChai24/ESSource,hanswang/elasticsearch,EasonYi/elasticsearch,Helen-Zhao/elasticsearch,kunallimaye/elasticsearch,drewr/elasticsearch,snikch/elasticsearch,strapdata/elassandra-test,mcku/elasticsearch,snikch/elasticsearch,lmenezes/elasticsearch,Chhunlong/elasticsearch,vroyer/elasticassandra,xingguang2013/elasticsearch,vvcephei/elasticsearch,brandonkearby/elasticsearch,szroland/elasticsearch,a2lin/elasticsearch,LeoYao/elasticsearch,kimimj/elasticsearch,fforbeck/elasticsearch,rento19962/elasticsearch,huypx1292/elasticsearch,VukDukic/elasticsearch,xingguang2013/elasticsearch,heng4fun/elasticsearch,kunallimaye/elasticsearch,btiernay/elasticsearch,strapdata/elassandra-test,ImpressTV/elasticsearch,beiske/elasticsearch,mapr/elasticsearch,sarwarbhuiyan/elasticsearch,lydonchandra/elasticsearch,amaliujia/elasticsearch,infusionsoft/elasticsearch,xpandan/elasticsearch,hafkensite/elasticsearch,Shekharrajak/elasticsearch,weipinghe/elasticsearch,schonfeld/elasticsearch,Widen/elasticsearch,jsgao0/elasticsearch,adrianbk/elasticsearch,abhijitiitr/es,huypx1292/elasticsearch,alexbrasetvik/elasticsearch,kimimj/elasticsearch,trangvh/elasticsearch,vvcephei/elasticsearch,petmit/elasticsearch,18098924759/elasticsearch,karthikjaps/elasticsearch,hanswang/elasticsearch,thecocce/elasticsearch,AleksKochev/elasticsearch,mcku/elasticsearch,pranavraman/elasticsearch,ThalaivaStars/OrgRepo1,dataduke/elasticsearch,salyh/elasticsearch,MjAbuz/elasticsearch,jsgao0/elasticsearch,hirdesh2008/elasticsearch,caengcjd/elasticsearch,huypx1292/elasticsearch,queirozfcom/elasticsearch,Asimov4/elasticsearch,scorpionvicky/elasticsearch,ouyangkongtong/elasticsearch,AshishThakur/elasticsearch,ThalaivaStars/OrgRepo1,areek/elasticsearch,ulkas/elasticsearch,GlenRSmith/elasticsearch,lzo/elasticsearch-1,ivansun1010/elasticsearch,s1monw/elasticsearch,franklanganke/elasticsearch,davidvgalbraith/elasticsearch,weipinghe/elasticsearch,ImpressTV/elasticsearch,sree
ramjayan/elasticsearch,umeshdangat/elasticsearch,aglne/elasticsearch,jpountz/elasticsearch,Ansh90/elasticsearch,loconsolutions/elasticsearch,wangyuxue/elasticsearch,geidies/elasticsearch,vroyer/elassandra,jw0201/elastic,mnylen/elasticsearch,strapdata/elassandra-test,dantuffery/elasticsearch,lmtwga/elasticsearch,btiernay/elasticsearch,strapdata/elassandra,kenshin233/elasticsearch,alexshadow007/elasticsearch,scottsom/elasticsearch,rento19962/elasticsearch,sreeramjayan/elasticsearch,pritishppai/elasticsearch,jimhooker2002/elasticsearch,rmuir/elasticsearch,petabytedata/elasticsearch,strapdata/elassandra,MetSystem/elasticsearch,lks21c/elasticsearch,sarwarbhuiyan/elasticsearch,strapdata/elassandra5-rc,sauravmondallive/elasticsearch,gingerwizard/elasticsearch,apepper/elasticsearch,dongjoon-hyun/elasticsearch,slavau/elasticsearch,smflorentino/elasticsearch,libosu/elasticsearch,mjason3/elasticsearch,cnfire/elasticsearch-1,wenpos/elasticsearch,lightslife/elasticsearch,easonC/elasticsearch,gmarz/elasticsearch,pritishppai/elasticsearch,libosu/elasticsearch,artnowo/elasticsearch,Asimov4/elasticsearch,maddin2016/elasticsearch,pranavraman/elasticsearch,tahaemin/elasticsearch,Ansh90/elasticsearch,scorpionvicky/elasticsearch,onegambler/elasticsearch,sposam/elasticsearch,lydonchandra/elasticsearch,queirozfcom/elasticsearch,geidies/elasticsearch,elasticdog/elasticsearch,bestwpw/elasticsearch,sposam/elasticsearch,vvcephei/elasticsearch,combinatorist/elasticsearch,YosuaMichael/elasticsearch,nilabhsagar/elasticsearch,hafkensite/elasticsearch,zhiqinghuang/elasticsearch,lzo/elasticsearch-1,likaiwalkman/elasticsearch,zeroctu/elasticsearch,thecocce/elasticsearch,hafkensite/elasticsearch,caengcjd/elasticsearch,wuranbo/elasticsearch,markllama/elasticsearch,markwalkom/elasticsearch,jprante/elasticsearch,iantruslove/elasticsearch,Widen/elasticsearch,mrorii/elasticsearch,pranavraman/elasticsearch,franklanganke/elasticsearch,Rygbee/elasticsearch,alexshadow007/elasticsearch,nrkkalyan/elasticsearch,Stacey-Gammon/elasticsearch,achow/elasticsearch,kkirsche/elasticsearch,jprante/elasticsearch,wuranbo/elasticsearch,tahaemin/elasticsearch,infusionsoft/elasticsearch,jsgao0/elasticsearch,salyh/elasticsearch,Shepard1212/elasticsearch,jango2015/elasticsearch,jchampion/elasticsearch,ivansun1010/elasticsearch,btiernay/elasticsearch,mkis-/elasticsearch,sscarduzio/elasticsearch,jw0201/elastic,s1monw/elasticsearch,elancom/elasticsearch,aglne/elasticsearch,springning/elasticsearch,chrismwendt/elasticsearch,kunallimaye/elasticsearch,mortonsykes/elasticsearch,iantruslove/elasticsearch,strapdata/elassandra-test,YosuaMichael/elasticsearch,nomoa/elasticsearch,markwalkom/elasticsearch,vorce/es-metrics,sdauletau/elasticsearch,hanswang/elasticsearch,ivansun1010/elasticsearch,areek/elasticsearch,uschindler/elasticsearch,karthikjaps/elasticsearch,SergVro/elasticsearch,TonyChai24/ESSource,robin13/elasticsearch,xingguang2013/elasticsearch,infusionsoft/elasticsearch,mmaracic/elasticsearch,wittyameta/elasticsearch,zkidkid/elasticsearch,camilojd/elasticsearch,tebriel/elasticsearch,ckclark/elasticsearch,dpursehouse/elasticsearch,btiernay/elasticsearch,combinatorist/elasticsearch,kkirsche/elasticsearch,bestwpw/elasticsearch,synhershko/elasticsearch,Shepard1212/elasticsearch,fubuki/elasticsearch,wittyameta/elasticsearch,huypx1292/elasticsearch,franklanganke/elasticsearch,markwalkom/elasticsearch,acchen97/elasticsearch,milodky/elasticsearch,rajanm/elasticsearch,yuy168/elasticsearch,kenshin233/elasticsearch,yuy168/elasticsearch,schonfeld/elasticsearch,tsohil
/elasticsearch,hafkensite/elasticsearch,lzo/elasticsearch-1,iacdingping/elasticsearch,fforbeck/elasticsearch,PhaedrusTheGreek/elasticsearch,janmejay/elasticsearch,golubev/elasticsearch,gfyoung/elasticsearch,pablocastro/elasticsearch,EasonYi/elasticsearch,yynil/elasticsearch,wbowling/elasticsearch,karthikjaps/elasticsearch,rento19962/elasticsearch,StefanGor/elasticsearch,GlenRSmith/elasticsearch,baishuo/elasticsearch_v2.1.0-baishuo,JervyShi/elasticsearch,mute/elasticsearch,girirajsharma/elasticsearch,masaruh/elasticsearch,milodky/elasticsearch,camilojd/elasticsearch,xuzha/elasticsearch,sarwarbhuiyan/elasticsearch,nellicus/elasticsearch,njlawton/elasticsearch,skearns64/elasticsearch,hanswang/elasticsearch,kaneshin/elasticsearch,ulkas/elasticsearch,lmtwga/elasticsearch,SergVro/elasticsearch,thecocce/elasticsearch,ThalaivaStars/OrgRepo1,strapdata/elassandra5-rc,szroland/elasticsearch,drewr/elasticsearch,jimczi/elasticsearch,vingupta3/elasticsearch,girirajsharma/elasticsearch,acchen97/elasticsearch,Liziyao/elasticsearch,nezirus/elasticsearch,kalburgimanjunath/elasticsearch,pablocastro/elasticsearch,yynil/elasticsearch,gingerwizard/elasticsearch,andrewvc/elasticsearch,andrejserafim/elasticsearch,kevinkluge/elasticsearch,awislowski/elasticsearch,gmarz/elasticsearch,janmejay/elasticsearch,jango2015/elasticsearch,uboness/elasticsearch,phani546/elasticsearch,wbowling/elasticsearch,javachengwc/elasticsearch,Shepard1212/elasticsearch,alexkuk/elasticsearch,masterweb121/elasticsearch,kenshin233/elasticsearch,lightslife/elasticsearch,sreeramjayan/elasticsearch,nomoa/elasticsearch,hirdesh2008/elasticsearch,kkirsche/elasticsearch,feiqitian/elasticsearch,nomoa/elasticsearch,tcucchietti/elasticsearch,slavau/elasticsearch,Stacey-Gammon/elasticsearch,easonC/elasticsearch,mkis-/elasticsearch,coding0011/elasticsearch,Microsoft/elasticsearch,andrejserafim/elasticsearch,linglaiyao1314/elasticsearch,abhijitiitr/es,elasticdog/elasticsearch,AndreKR/elasticsearch,YosuaMichael/elasticsearch,TonyChai24/ESSource,zeroctu/elasticsearch,trangvh/elasticsearch,Rygbee/elasticsearch,kevinkluge/elasticsearch,kenshin233/elasticsearch,JSCooke/elasticsearch,mapr/elasticsearch,TonyChai24/ESSource,StefanGor/elasticsearch,iamjakob/elasticsearch,winstonewert/elasticsearch,a2lin/elasticsearch,areek/elasticsearch,hanswang/elasticsearch,Siddartha07/elasticsearch,mm0/elasticsearch,MisterAndersen/elasticsearch,andrejserafim/elasticsearch,janmejay/elasticsearch,ivansun1010/elasticsearch,xuzha/elasticsearch,nknize/elasticsearch,jaynblue/elasticsearch,rento19962/elasticsearch,LeoYao/elasticsearch,mjhennig/elasticsearch,tahaemin/elasticsearch,himanshuag/elasticsearch,drewr/elasticsearch,beiske/elasticsearch,kalburgimanjunath/elasticsearch,phani546/elasticsearch,abibell/elasticsearch,markllama/elasticsearch,LewayneNaidoo/elasticsearch,linglaiyao1314/elasticsearch,pranavraman/elasticsearch,gingerwizard/elasticsearch,Clairebi/ElasticsearchClone,areek/elasticsearch,C-Bish/elasticsearch,iamjakob/elasticsearch,mnylen/elasticsearch,szroland/elasticsearch,Ansh90/elasticsearch,kingaj/elasticsearch,sarwarbhuiyan/elasticsearch,pablocastro/elasticsearch,rajanm/elasticsearch,apepper/elasticsearch,sc0ttkclark/elasticsearch,jimhooker2002/elasticsearch,mapr/elasticsearch,Fsero/elasticsearch,lmtwga/elasticsearch,kunallimaye/elasticsearch,marcuswr/elasticsearch-dateline,vvcephei/elasticsearch,rhoml/elasticsearch,mm0/elasticsearch,rhoml/elasticsearch,IanvsPoplicola/elasticsearch,wuranbo/elasticsearch,golubev/elasticsearch,mohsinh/elasticsearch,qwerty4030/elasticse
arch,huanzhong/elasticsearch,yongminxia/elasticsearch,kingaj/elasticsearch,amit-shar/elasticsearch,ESamir/elasticsearch,zkidkid/elasticsearch,khiraiwa/elasticsearch,JervyShi/elasticsearch,milodky/elasticsearch,girirajsharma/elasticsearch,martinstuga/elasticsearch,mohit/elasticsearch,infusionsoft/elasticsearch,ricardocerq/elasticsearch,ulkas/elasticsearch,Flipkart/elasticsearch,wangtuo/elasticsearch,18098924759/elasticsearch,petmit/elasticsearch,smflorentino/elasticsearch,pablocastro/elasticsearch,rhoml/elasticsearch,peschlowp/elasticsearch,spiegela/elasticsearch,tkssharma/elasticsearch,MetSystem/elasticsearch,yanjunh/elasticsearch,pablocastro/elasticsearch,henakamaMSFT/elasticsearch,ThiagoGarciaAlves/elasticsearch,cwurm/elasticsearch,tsohil/elasticsearch,HarishAtGitHub/elasticsearch,Charlesdong/elasticsearch,loconsolutions/elasticsearch,shreejay/elasticsearch,amit-shar/elasticsearch,mohit/elasticsearch,Flipkart/elasticsearch,Microsoft/elasticsearch,Stacey-Gammon/elasticsearch,shreejay/elasticsearch,kenshin233/elasticsearch,xpandan/elasticsearch,loconsolutions/elasticsearch,fekaputra/elasticsearch,kevinkluge/elasticsearch,baishuo/elasticsearch_v2.1.0-baishuo,Brijeshrpatel9/elasticsearch,ESamir/elasticsearch,mortonsykes/elasticsearch,luiseduardohdbackup/elasticsearch,vingupta3/elasticsearch,raishiv/elasticsearch,heng4fun/elasticsearch,ydsakyclguozi/elasticsearch,onegambler/elasticsearch,shreejay/elasticsearch,nazarewk/elasticsearch,KimTaehee/elasticsearch,AshishThakur/elasticsearch,Ansh90/elasticsearch,fooljohnny/elasticsearch,socialrank/elasticsearch,bestwpw/elasticsearch,MaineC/elasticsearch,PhaedrusTheGreek/elasticsearch,hirdesh2008/elasticsearch,geidies/elasticsearch,masterweb121/elasticsearch,kalimatas/elasticsearch,franklanganke/elasticsearch,abibell/elasticsearch,mute/elasticsearch,i-am-Nathan/elasticsearch,kaneshin/elasticsearch,polyfractal/elasticsearch,lydonchandra/elasticsearch,nezirus/elasticsearch,zhaocloud/elasticsearch,achow/elasticsearch,hanswang/elasticsearch,springning/elasticsearch,AleksKochev/elasticsearch,Fsero/elasticsearch,GlenRSmith/elasticsearch,queirozfcom/elasticsearch,Helen-Zhao/elasticsearch,fernandozhu/elasticsearch,mgalushka/elasticsearch,sc0ttkclark/elasticsearch,vroyer/elassandra,Kakakakakku/elasticsearch,C-Bish/elasticsearch,himanshuag/elasticsearch,VukDukic/elasticsearch,koxa29/elasticsearch,jeteve/elasticsearch,nellicus/elasticsearch,KimTaehee/elasticsearch,Fsero/elasticsearch,diendt/elasticsearch,qwerty4030/elasticsearch,robin13/elasticsearch,kaneshin/elasticsearch,Collaborne/elasticsearch,alexkuk/elasticsearch,mjason3/elasticsearch,gfyoung/elasticsearch,djschny/elasticsearch,wimvds/elasticsearch,naveenhooda2000/elasticsearch,sc0ttkclark/elasticsearch,jchampion/elasticsearch,koxa29/elasticsearch,yongminxia/elasticsearch,fooljohnny/elasticsearch,yuy168/elasticsearch,heng4fun/elasticsearch,humandb/elasticsearch,ydsakyclguozi/elasticsearch,trangvh/elasticsearch,wayeast/elasticsearch,mjason3/elasticsearch,JSCooke/elasticsearch,spiegela/elasticsearch,jprante/elasticsearch,bestwpw/elasticsearch,vingupta3/elasticsearch,ZTE-PaaS/elasticsearch,IanvsPoplicola/elasticsearch,tcucchietti/elasticsearch,TonyChai24/ESSource,SaiprasadKrishnamurthy/elasticsearch,obourgain/elasticsearch,liweinan0423/elasticsearch,dataduke/elasticsearch,dylan8902/elasticsearch,lchennup/elasticsearch,amit-shar/elasticsearch,kunallimaye/elasticsearch,nilabhsagar/elasticsearch,myelin/elasticsearch,alexbrasetvik/elasticsearch,alexkuk/elasticsearch,chrismwendt/elasticsearch,umeshdangat/elasticsear
ch,ZTE-PaaS/elasticsearch,synhershko/elasticsearch,mkis-/elasticsearch,palecur/elasticsearch,fred84/elasticsearch,slavau/elasticsearch,feiqitian/elasticsearch,KimTaehee/elasticsearch,zeroctu/elasticsearch,VukDukic/elasticsearch,chirilo/elasticsearch,djschny/elasticsearch,mjhennig/elasticsearch,pranavraman/elasticsearch,EasonYi/elasticsearch,YosuaMichael/elasticsearch,awislowski/elasticsearch,Uiho/elasticsearch,yynil/elasticsearch,yanjunh/elasticsearch,boliza/elasticsearch,mute/elasticsearch,wbowling/elasticsearch,sauravmondallive/elasticsearch,tkssharma/elasticsearch,lydonchandra/elasticsearch,javachengwc/elasticsearch,davidvgalbraith/elasticsearch,skearns64/elasticsearch,snikch/elasticsearch,libosu/elasticsearch,nrkkalyan/elasticsearch,dylan8902/elasticsearch,yongminxia/elasticsearch,wuranbo/elasticsearch,naveenhooda2000/elasticsearch,TonyChai24/ESSource,alexbrasetvik/elasticsearch,vietlq/elasticsearch,markwalkom/elasticsearch,episerver/elasticsearch,glefloch/elasticsearch,artnowo/elasticsearch,achow/elasticsearch,AndreKR/elasticsearch,jaynblue/elasticsearch,wenpos/elasticsearch,amaliujia/elasticsearch,adrianbk/elasticsearch,MetSystem/elasticsearch,brwe/elasticsearch,EasonYi/elasticsearch,i-am-Nathan/elasticsearch,alexksikes/elasticsearch,smflorentino/elasticsearch,himanshuag/elasticsearch,anti-social/elasticsearch,weipinghe/elasticsearch,peschlowp/elasticsearch,MetSystem/elasticsearch,MichaelLiZhou/elasticsearch,caengcjd/elasticsearch,weipinghe/elasticsearch,baishuo/elasticsearch_v2.1.0-baishuo,mute/elasticsearch,dongjoon-hyun/elasticsearch,SergVro/elasticsearch,elasticdog/elasticsearch,LeoYao/elasticsearch,sc0ttkclark/elasticsearch,mohsinh/elasticsearch,sc0ttkclark/elasticsearch,mikemccand/elasticsearch,luiseduardohdbackup/elasticsearch,Brijeshrpatel9/elasticsearch,kaneshin/elasticsearch,ouyangkongtong/elasticsearch,petmit/elasticsearch,tebriel/elasticsearch,fekaputra/elasticsearch,spiegela/elasticsearch,kaneshin/elasticsearch,koxa29/elasticsearch,a2lin/elasticsearch,rmuir/elasticsearch,chirilo/elasticsearch,chrismwendt/elasticsearch,YosuaMichael/elasticsearch,VukDukic/elasticsearch,clintongormley/elasticsearch,Widen/elasticsearch,Widen/elasticsearch,ckclark/elasticsearch,mm0/elasticsearch,markllama/elasticsearch,nellicus/elasticsearch,fooljohnny/elasticsearch,likaiwalkman/elasticsearch,drewr/elasticsearch,ImpressTV/elasticsearch,andrestc/elasticsearch,PhaedrusTheGreek/elasticsearch,gfyoung/elasticsearch,iamjakob/elasticsearch,kimimj/elasticsearch,cnfire/elasticsearch-1,alexksikes/elasticsearch,xuzha/elasticsearch,kevinkluge/elasticsearch,jaynblue/elasticsearch,kimimj/elasticsearch,infusionsoft/elasticsearch,mjhennig/elasticsearch,elancom/elasticsearch,huanzhong/elasticsearch,beiske/elasticsearch,hanswang/elasticsearch,lchennup/elasticsearch,Shekharrajak/elasticsearch,acchen97/elasticsearch,adrianbk/elasticsearch,mrorii/elasticsearch,kimimj/elasticsearch,xpandan/elasticsearch,nrkkalyan/elasticsearch,vietlq/elasticsearch,hanst/elasticsearch,sdauletau/elasticsearch,fubuki/elasticsearch,phani546/elasticsearch,xuzha/elasticsearch,amit-shar/elasticsearch,artnowo/elasticsearch,cwurm/elasticsearch,awislowski/elasticsearch,smflorentino/elasticsearch,ImpressTV/elasticsearch,Siddartha07/elasticsearch,vroyer/elasticassandra,nilabhsagar/elasticsearch,weipinghe/elasticsearch,MetSystem/elasticsearch,boliza/elasticsearch,franklanganke/elasticsearch,Stacey-Gammon/elasticsearch,jbertouch/elasticsearch,codebunt/elasticsearch,strapdata/elassandra,lzo/elasticsearch-1,nazarewk/elasticsearch,Widen/elasticsear
ch,codebunt/elasticsearch,nknize/elasticsearch,janmejay/elasticsearch,mohit/elasticsearch,NBSW/elasticsearch,pranavraman/elasticsearch,HarishAtGitHub/elasticsearch,Asimov4/elasticsearch,ricardocerq/elasticsearch,mortonsykes/elasticsearch,sreeramjayan/elasticsearch,jimhooker2002/elasticsearch,nilabhsagar/elasticsearch,lzo/elasticsearch-1,abibell/elasticsearch,bawse/elasticsearch,brwe/elasticsearch,kalimatas/elasticsearch,zhiqinghuang/elasticsearch,Helen-Zhao/elasticsearch,EasonYi/elasticsearch,luiseduardohdbackup/elasticsearch,tsohil/elasticsearch,vroyer/elassandra,boliza/elasticsearch,Flipkart/elasticsearch,kcompher/elasticsearch,Collaborne/elasticsearch,jbertouch/elasticsearch,iantruslove/elasticsearch,wenpos/elasticsearch,fubuki/elasticsearch,Collaborne/elasticsearch,jsgao0/elasticsearch,wayeast/elasticsearch,gingerwizard/elasticsearch,wittyameta/elasticsearch,jimczi/elasticsearch,HonzaKral/elasticsearch,beiske/elasticsearch,18098924759/elasticsearch,jeteve/elasticsearch,awislowski/elasticsearch,wimvds/elasticsearch,hirdesh2008/elasticsearch,fooljohnny/elasticsearch,yongminxia/elasticsearch,gmarz/elasticsearch,wangyuxue/elasticsearch,rlugojr/elasticsearch,wbowling/elasticsearch,mmaracic/elasticsearch,scottsom/elasticsearch,tebriel/elasticsearch,alexksikes/elasticsearch,thecocce/elasticsearch,MichaelLiZhou/elasticsearch,mbrukman/elasticsearch,slavau/elasticsearch,Helen-Zhao/elasticsearch,xingguang2013/elasticsearch,yynil/elasticsearch,zeroctu/elasticsearch,Ansh90/elasticsearch,combinatorist/elasticsearch,fubuki/elasticsearch,areek/elasticsearch,tsohil/elasticsearch,NBSW/elasticsearch,MisterAndersen/elasticsearch,iamjakob/elasticsearch,micpalmia/elasticsearch,wbowling/elasticsearch,likaiwalkman/elasticsearch,xuzha/elasticsearch,amaliujia/elasticsearch,Liziyao/elasticsearch,lydonchandra/elasticsearch,sc0ttkclark/elasticsearch,slavau/elasticsearch,chrismwendt/elasticsearch,kalburgimanjunath/elasticsearch,dongaihua/highlight-elasticsearch,brwe/elasticsearch,ZTE-PaaS/elasticsearch,karthikjaps/elasticsearch,SaiprasadKrishnamurthy/elasticsearch,SaiprasadKrishnamurthy/elasticsearch,marcuswr/elasticsearch-dateline,acchen97/elasticsearch,EasonYi/elasticsearch,hanst/elasticsearch,sposam/elasticsearch,LeoYao/elasticsearch,masaruh/elasticsearch,MetSystem/elasticsearch,janmejay/elasticsearch,queirozfcom/elasticsearch,dpursehouse/elasticsearch,vvcephei/elasticsearch,gmarz/elasticsearch,kimchy/elasticsearch
/* * Licensed to Elastic Search and Shay Banon under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. Elastic Search licenses this * file to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.rest.action.search; import com.google.inject.Inject; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.search.SearchOperationThreading; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchScrollRequest; import org.elasticsearch.client.Client; import org.elasticsearch.rest.*; import org.elasticsearch.search.Scroll; import org.elasticsearch.util.json.JsonBuilder; import org.elasticsearch.util.settings.Settings; import java.io.IOException; import static org.elasticsearch.rest.RestRequest.Method.*; import static org.elasticsearch.rest.RestResponse.Status.*; import static org.elasticsearch.rest.action.support.RestJsonBuilder.*; import static org.elasticsearch.util.TimeValue.*; /** * @author kimchy (shay.banon) */ public class RestSearchScrollAction extends BaseRestHandler { @Inject public RestSearchScrollAction(Settings settings, Client client, RestController controller) { super(settings, client); controller.registerHandler(GET, "/_search/scroll", this); controller.registerHandler(POST, "/_search/scroll", this); controller.registerHandler(GET, "/_search/scroll/{scrollId}", this); controller.registerHandler(POST, "/_search/scroll/{scrollId}", this); } @Override public void handleRequest(final RestRequest request, final RestChannel channel) { SearchScrollRequest searchScrollRequest = new SearchScrollRequest(request.param("scrollId")); try { String scroll = request.param("scroll"); if (scroll != null) { searchScrollRequest.scroll(new Scroll(parseTimeValue(scroll, null))); } searchScrollRequest.listenerThreaded(false); SearchOperationThreading operationThreading = SearchOperationThreading.fromString(request.param("operationThreading"), SearchOperationThreading.SINGLE_THREAD); if (operationThreading == SearchOperationThreading.NO_THREADS) { // since we don't spawn, don't allow no_threads, but change it to a single thread operationThreading = SearchOperationThreading.SINGLE_THREAD; } searchScrollRequest.operationThreading(operationThreading); } catch (Exception e) { try { JsonBuilder builder = restJsonBuilder(request); channel.sendResponse(new JsonRestResponse(request, BAD_REQUEST, builder.startObject().field("error", e.getMessage()).endObject())); } catch (IOException e1) { logger.error("Failed to send failure response", e1); } return; } client.searchScroll(searchScrollRequest, new ActionListener<SearchResponse>() { @Override public void onResponse(SearchResponse response) { try { JsonBuilder builder = restJsonBuilder(request); builder.startObject(); response.toJson(builder, request); builder.endObject(); channel.sendResponse(new JsonRestResponse(request, OK, builder)); } catch (Exception e) { onFailure(e); } } @Override public 
void onFailure(Throwable e) { try { channel.sendResponse(new JsonThrowableRestResponse(request, e)); } catch (IOException e1) { logger.error("Failed to send failure response", e1); } } }); } }
modules/elasticsearch/src/main/java/org/elasticsearch/rest/action/search/RestSearchScrollAction.java
/* * Licensed to Elastic Search and Shay Banon under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. Elastic Search licenses this * file to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.rest.action.search; import com.google.inject.Inject; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.search.SearchOperationThreading; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchScrollRequest; import org.elasticsearch.client.Client; import org.elasticsearch.rest.*; import org.elasticsearch.search.Scroll; import org.elasticsearch.util.json.JsonBuilder; import org.elasticsearch.util.settings.Settings; import java.io.IOException; import static org.elasticsearch.rest.RestRequest.Method.*; import static org.elasticsearch.rest.RestResponse.Status.*; import static org.elasticsearch.rest.action.support.RestJsonBuilder.*; import static org.elasticsearch.util.TimeValue.*; /** * @author kimchy (shay.banon) */ public class RestSearchScrollAction extends BaseRestHandler { @Inject public RestSearchScrollAction(Settings settings, Client client, RestController controller) { super(settings, client); controller.registerHandler(GET, "/_searchScroll", this); controller.registerHandler(POST, "/_searchScroll", this); controller.registerHandler(GET, "/_searchScroll/{scrollId}", this); controller.registerHandler(POST, "/_searchScroll/{scrollId}", this); } @Override public void handleRequest(final RestRequest request, final RestChannel channel) { SearchScrollRequest searchScrollRequest = new SearchScrollRequest(request.param("scrollId")); try { String scroll = request.param("scroll"); if (scroll != null) { searchScrollRequest.scroll(new Scroll(parseTimeValue(scroll, null))); } searchScrollRequest.listenerThreaded(false); SearchOperationThreading operationThreading = SearchOperationThreading.fromString(request.param("operationThreading"), SearchOperationThreading.SINGLE_THREAD); if (operationThreading == SearchOperationThreading.NO_THREADS) { // since we don't spawn, don't allow no_threads, but change it to a single thread operationThreading = SearchOperationThreading.SINGLE_THREAD; } searchScrollRequest.operationThreading(operationThreading); } catch (Exception e) { try { JsonBuilder builder = restJsonBuilder(request); channel.sendResponse(new JsonRestResponse(request, BAD_REQUEST, builder.startObject().field("error", e.getMessage()).endObject())); } catch (IOException e1) { logger.error("Failed to send failure response", e1); } return; } client.searchScroll(searchScrollRequest, new ActionListener<SearchResponse>() { @Override public void onResponse(SearchResponse response) { try { JsonBuilder builder = restJsonBuilder(request); builder.startObject(); response.toJson(builder, request); builder.endObject(); channel.sendResponse(new JsonRestResponse(request, OK, builder)); } catch (Exception e) { onFailure(e); } } @Override public 
void onFailure(Throwable e) { try { channel.sendResponse(new JsonThrowableRestResponse(request, e)); } catch (IOException e1) { logger.error("Failed to send failure response", e1); } } }); } }
change search scroll URI to /_search/scroll from /_searchScroll
modules/elasticsearch/src/main/java/org/elasticsearch/rest/action/search/RestSearchScrollAction.java
change search scroll URI to /_search/scroll from /_searchScroll
<ide><path>modules/elasticsearch/src/main/java/org/elasticsearch/rest/action/search/RestSearchScrollAction.java
<ide>     @Inject public RestSearchScrollAction(Settings settings, Client client, RestController controller) {
<ide>         super(settings, client);
<ide>
<del>        controller.registerHandler(GET, "/_searchScroll", this);
<del>        controller.registerHandler(POST, "/_searchScroll", this);
<del>        controller.registerHandler(GET, "/_searchScroll/{scrollId}", this);
<del>        controller.registerHandler(POST, "/_searchScroll/{scrollId}", this);
<add>        controller.registerHandler(GET, "/_search/scroll", this);
<add>        controller.registerHandler(POST, "/_search/scroll", this);
<add>        controller.registerHandler(GET, "/_search/scroll/{scrollId}", this);
<add>        controller.registerHandler(POST, "/_search/scroll/{scrollId}", this);
<ide>     }
<ide>
<ide>     @Override public void handleRequest(final RestRequest request, final RestChannel channel) {
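For illustration only (not part of this commit): a minimal sketch of how a client could call the renamed REST endpoint registered above. The path and the scroll/scrollId parameter names come from the handler code in this record; the host, port, and scroll id value are hypothetical placeholders.

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;

public class ScrollClientSketch {
    public static void main(String[] args) throws Exception {
        // Hypothetical values: a locally running node and a scroll id returned by an
        // earlier search request that was issued with the "scroll" parameter.
        String scrollId = "PLACEHOLDER_SCROLL_ID";
        String endpoint = "http://localhost:9200/_search/scroll/" + scrollId + "?scroll=5m";

        // The handler above registers both GET and POST for this path.
        HttpURLConnection conn = (HttpURLConnection) new URL(endpoint).openConnection();
        conn.setRequestMethod("GET");

        try (BufferedReader in = new BufferedReader(new InputStreamReader(conn.getInputStream()))) {
            String line;
            while ((line = in.readLine()) != null) {
                System.out.println(line); // JSON body with the next page of hits
            }
        }
    }
}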
Java
apache-2.0
5d381410e48b83baa500704e1f71548e47f447a3
0
sosy-lab/java-smt,sosy-lab/java-smt,sosy-lab/java-smt,sosy-lab/java-smt,sosy-lab/java-smt
/* * JavaSMT is an API wrapper for a collection of SMT solvers. * This file is part of JavaSMT. * * Copyright (C) 2007-2016 Dirk Beyer * All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sosy_lab.java_smt.solvers.mathsat5; import static org.sosy_lab.java_smt.solvers.mathsat5.Mathsat5NativeApi.msat_assert_formula; import static org.sosy_lab.java_smt.solvers.mathsat5.Mathsat5NativeApi.msat_create_itp_group; import static org.sosy_lab.java_smt.solvers.mathsat5.Mathsat5NativeApi.msat_get_interpolant; import static org.sosy_lab.java_smt.solvers.mathsat5.Mathsat5NativeApi.msat_push_backtrack_point; import static org.sosy_lab.java_smt.solvers.mathsat5.Mathsat5NativeApi.msat_set_itp_group; import com.google.common.base.Preconditions; import com.google.common.base.Strings; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.common.primitives.Ints; import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.Map; import java.util.Set; import org.sosy_lab.common.ShutdownNotifier; import org.sosy_lab.java_smt.api.BooleanFormula; import org.sosy_lab.java_smt.api.InterpolatingProverEnvironment; import org.sosy_lab.java_smt.api.SolverContext.ProverOptions; import org.sosy_lab.java_smt.api.SolverException; class Mathsat5InterpolatingProver extends Mathsat5AbstractProver<Integer> implements InterpolatingProverEnvironment<Integer> { private static final ImmutableSet<String> ALLOWED_FAILURE_MESSAGES = ImmutableSet.of( "impossible to build a suitable congruence graph!", "can't build ie-local interpolant", "set_raised on an already-raised proof", "splitting of AB-mixed terms not supported", "Hypothesis belongs neither to A nor to B", "FP<->BV combination unsupported by the current configuration", "cur_eq unknown to the classifier", "unknown constraint in the ItpMapper", "AB-mixed term not found in eq_itp map", "uncolored atom found in Array proof", "uncolorable Array proof", "arr: proof splitting not supported"); private static final ImmutableSet<String> ALLOWED_FAILURE_MESSAGE_PREFIXES = ImmutableSet.of("uncolorable NA lemma"); Mathsat5InterpolatingProver( Mathsat5SolverContext pMgr, ShutdownNotifier pShutdownNotifier, Mathsat5FormulaCreator creator, Set<ProverOptions> options) { super(pMgr, options, creator, pShutdownNotifier); } @Override protected void createConfig(Map<String, String> pConfig) { pConfig.put("interpolation", "true"); pConfig.put("model_generation", "true"); pConfig.put("theory.bv.eager", "false"); } @Override public Integer addConstraint(BooleanFormula f) { Preconditions.checkState(!closed); int group = msat_create_itp_group(curEnv); msat_set_itp_group(curEnv, group); long t = creator.extractInfo(f); msat_assert_formula(curEnv, t); return group; } @Override public void push() { Preconditions.checkState(!closed); msat_push_backtrack_point(curEnv); } @Override protected long getMsatModel() throws SolverException { // Interpolation in MathSAT is 
buggy at least for UFs+Ints and sometimes returns a wrong "SAT". // In this case, model generation fails and users should try again without interpolation. // Example failures: "Invalid model", "non-integer model value" // As this is a bug in MathSAT and not in our code, we throw a SolverException. // We do it only in InterpolatingProver because without interpolation this is not expected. try { return super.getMsatModel(); } catch (IllegalArgumentException e) { String msg = Strings.emptyToNull(e.getMessage()); throw new SolverException( "msat_get_model failed" + (msg != null ? " with \"" + msg + "\"" : "") + ", probably the actual problem is interpolation", e); } } @Override public BooleanFormula getInterpolant(Collection<Integer> formulasOfA) throws SolverException { Preconditions.checkState(!closed); int[] groupsOfA = Ints.toArray(formulasOfA); long itp; try { itp = msat_get_interpolant(curEnv, groupsOfA); } catch (IllegalArgumentException e) { final String message = e.getMessage(); if (!Strings.isNullOrEmpty(message) && (ALLOWED_FAILURE_MESSAGES.contains(message) || ALLOWED_FAILURE_MESSAGE_PREFIXES.stream().anyMatch(message::startsWith))) { // This is not a bug in our code, // but a problem of MathSAT which happens during interpolation throw new SolverException(message, e); } throw e; } return creator.encapsulateBoolean(itp); } @Override public List<BooleanFormula> getSeqInterpolants( List<? extends Collection<Integer>> partitionedFormulas) throws SolverException { // the fallback to a loop is sound and returns an inductive sequence of interpolants final List<BooleanFormula> itps = new ArrayList<>(); for (int i = 1; i < partitionedFormulas.size(); i++) { itps.add( getInterpolant(Lists.newArrayList(Iterables.concat(partitionedFormulas.subList(0, i))))); } return itps; } @Override public List<BooleanFormula> getTreeInterpolants( List<? extends Collection<Integer>> partitionedFormulas, int[] startOfSubTree) { throw new UnsupportedOperationException( "directly receiving tree interpolants is not supported." + "Use another solver or another strategy for interpolants."); } @Override public <T> T allSat(AllSatCallback<T> callback, List<BooleanFormula> important) { // TODO how can we support allsat in MathSat5-interpolation-prover? // error: "allsat is not compatible wwith proof generation" throw new UnsupportedOperationException( "allsat computation is not possible with interpolation prover."); } }
src/org/sosy_lab/java_smt/solvers/mathsat5/Mathsat5InterpolatingProver.java
/* * JavaSMT is an API wrapper for a collection of SMT solvers. * This file is part of JavaSMT. * * Copyright (C) 2007-2016 Dirk Beyer * All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sosy_lab.java_smt.solvers.mathsat5; import static org.sosy_lab.java_smt.solvers.mathsat5.Mathsat5NativeApi.msat_assert_formula; import static org.sosy_lab.java_smt.solvers.mathsat5.Mathsat5NativeApi.msat_create_itp_group; import static org.sosy_lab.java_smt.solvers.mathsat5.Mathsat5NativeApi.msat_get_interpolant; import static org.sosy_lab.java_smt.solvers.mathsat5.Mathsat5NativeApi.msat_push_backtrack_point; import static org.sosy_lab.java_smt.solvers.mathsat5.Mathsat5NativeApi.msat_set_itp_group; import com.google.common.base.Preconditions; import com.google.common.base.Strings; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.common.primitives.Ints; import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.Map; import java.util.Set; import org.sosy_lab.common.ShutdownNotifier; import org.sosy_lab.java_smt.api.BooleanFormula; import org.sosy_lab.java_smt.api.InterpolatingProverEnvironment; import org.sosy_lab.java_smt.api.SolverContext.ProverOptions; import org.sosy_lab.java_smt.api.SolverException; class Mathsat5InterpolatingProver extends Mathsat5AbstractProver<Integer> implements InterpolatingProverEnvironment<Integer> { private static final ImmutableSet<String> ALLOWED_FAILURE_MESSAGES = ImmutableSet.of( "impossible to build a suitable congruence graph!", "can't build ie-local interpolant", "set_raised on an already-raised proof", "splitting of AB-mixed terms not supported", "Hypothesis belongs neither to A nor to B", "FP<->BV combination unsupported by the current configuration", "cur_eq unknown to the classifier", "unknown constraint in the ItpMapper", "AB-mixed term not found in eq_itp map", "uncolored atom found in Array proof", "uncolorable Array proof", "arr: proof splitting not supported"); private static final ImmutableSet<String> ALLOWED_FAILURE_MESSAGE_PREFIXES = ImmutableSet.of("uncolorable NA lemma"); Mathsat5InterpolatingProver( Mathsat5SolverContext pMgr, ShutdownNotifier pShutdownNotifier, Mathsat5FormulaCreator creator, Set<ProverOptions> options) { super(pMgr, options, creator, pShutdownNotifier); } @Override protected void createConfig(Map<String, String> pConfig) { pConfig.put("interpolation", "true"); pConfig.put("model_generation", "true"); pConfig.put("theory.bv.eager", "false"); } @Override public Integer addConstraint(BooleanFormula f) { Preconditions.checkState(!closed); int group = msat_create_itp_group(curEnv); msat_set_itp_group(curEnv, group); long t = creator.extractInfo(f); msat_assert_formula(curEnv, t); return group; } @Override public void push() { Preconditions.checkState(!closed); msat_push_backtrack_point(curEnv); } @Override protected long getMsatModel() throws SolverException { // Interpolation in MathSAT is 
buggy at least for UFs+Ints and sometimes returns a wrong "SAT". // In this case, model generation fails and users should try again without interpolation. // Example failures: "Invalid model", "non-integer model value" // As this is a bug in MathSAT and not in our code, we throw a SolverException. // We do it only in InterpolatingProver because without interpolation this is not expected. try { return super.getMsatModel(); } catch (IllegalArgumentException e) { String msg = Strings.emptyToNull(e.getMessage()); throw new SolverException( "msat_get_model failed" + (msg != null ? " with \"" + msg + "\"" : "") + ", probably the actual problem is interpolation", e); } } @Override public BooleanFormula getInterpolant(Collection<Integer> formulasOfA) throws SolverException { Preconditions.checkState(!closed); int[] groupsOfA = Ints.toArray(formulasOfA); long itp; try { itp = msat_get_interpolant(curEnv, groupsOfA); } catch (IllegalArgumentException e) { final String message = e.getMessage(); if (!Strings.isNullOrEmpty(message) && (ALLOWED_FAILURE_MESSAGES.contains(message) || ALLOWED_FAILURE_MESSAGE_PREFIXES.stream().anyMatch(message::startsWith))) { // This is not a bug in our code, // but a problem of MathSAT which happens during interpolation throw new SolverException(message, e); } throw e; } return creator.encapsulateBoolean(itp); } @Override public List<BooleanFormula> getSeqInterpolants( List<? extends Collection<Integer>> partitionedFormulas) throws SolverException { // the fallback to a loop is sound and returns an inductive sequence of interpolants final List<BooleanFormula> itps = new ArrayList<>(); for (int i = 0; i < partitionedFormulas.size(); i++) { itps.add( getInterpolant(Lists.newArrayList(Iterables.concat(partitionedFormulas.subList(0, i))))); } return itps; } @Override public List<BooleanFormula> getTreeInterpolants( List<? extends Collection<Integer>> partitionedFormulas, int[] startOfSubTree) { throw new UnsupportedOperationException( "directly receiving tree interpolants is not supported." + "Use another solver or another strategy for interpolants."); } @Override public <T> T allSat(AllSatCallback<T> callback, List<BooleanFormula> important) { // TODO how can we support allsat in MathSat5-interpolation-prover? // error: "allsat is not compatible wwith proof generation" throw new UnsupportedOperationException( "allsat computation is not possible with interpolation prover."); } }
bugfix for sequential interpolation loop in Mathsat5-wrapper.
src/org/sosy_lab/java_smt/solvers/mathsat5/Mathsat5InterpolatingProver.java
bugfix for sequential interpolation loop in Mathsat5-wrapper.
<ide><path>src/org/sosy_lab/java_smt/solvers/mathsat5/Mathsat5InterpolatingProver.java
<ide>       List<? extends Collection<Integer>> partitionedFormulas) throws SolverException {
<ide>     // the fallback to a loop is sound and returns an inductive sequence of interpolants
<ide>     final List<BooleanFormula> itps = new ArrayList<>();
<del>    for (int i = 0; i < partitionedFormulas.size(); i++) {
<add>    for (int i = 1; i < partitionedFormulas.size(); i++) {
<ide>       itps.add(
<ide>           getInterpolant(Lists.newArrayList(Iterables.concat(partitionedFormulas.subList(0, i)))));
<ide>     }
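Side note (illustrative only, not part of the commit): for n partitioned formulas an inductive sequence consists of n - 1 interpolants, one per proper, non-empty prefix, which is why the fixed loop starts at i = 1. The toy program below just prints the prefixes that the corrected loop would pass to getInterpolant; the partition labels are made up.

import java.util.Arrays;
import java.util.List;

public class SeqItpPrefixSketch {
    public static void main(String[] args) {
        // Hypothetical labels standing in for the Collection<Integer> interpolation groups.
        List<String> partitions = Arrays.asList("A0", "A1", "A2", "A3");

        // Corrected loop bounds: i runs from 1 to size - 1, yielding size - 1 prefixes.
        // The old "i = 0" start additionally queried an empty prefix, i.e. one query too many.
        for (int i = 1; i < partitions.size(); i++) {
            System.out.println("interpolant #" + i + " from prefix " + partitions.subList(0, i));
        }
    }
}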
Java
apache-2.0
fd694bedb1586c076ce15c5ae9d99760e2ed5776
0
npatarino/apk-methods-analyzer,itcayman/dex-method-counts,marcoaros/dex-method-counts,liufuxin/dex-method-counts,MaTriXy/dex-method-counts,cncomer/dex-method-counts,luoxiaobin88/dex-method-counts,liqiuzuo/dex-method-counts,Rowandjj/dex-method-counts,itcayman/dex-method-counts,laiqurufeng/dex-method-counts,mihaip/dex-method-counts,Rowandjj/dex-method-counts,liufuxin/dex-method-counts,luoxiaobin88/dex-method-counts,MaTriXy/dex-method-counts,cncomer/dex-method-counts,cpinan/dex-method-counts,npatarino/apk-methods-analyzer,mihaip/dex-method-counts,dambrisco/dex-method-counts,dambrisco/dex-method-counts,liqiuzuo/dex-method-counts,cpinan/dex-method-counts,marcoaros/dex-method-counts,npatarino/apk-methods-analyzer,laiqurufeng/dex-method-counts
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package info.persistent.dex; import com.android.dexdeps.DexData; import com.android.dexdeps.DexDataException; import java.io.File; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; import java.io.RandomAccessFile; import java.util.ArrayList; import java.util.List; import java.util.zip.ZipEntry; import java.util.zip.ZipException; import java.util.zip.ZipFile; public class Main { private static final String CLASSES_DEX = "classes.dex"; private boolean includeClasses; private String packageFilter; private int maxDepth = Integer.MAX_VALUE; private DexMethodCounts.Filter filter = DexMethodCounts.Filter.ALL; private String[] inputFileNames; /** * Entry point. */ public static void main(String[] args) { Main main = new Main(); main.run(args); } /** * Start things up. */ void run(String[] args) { try { parseArgs(args); List<String> fileNames = new ArrayList<String>(); for (String inputFileName : inputFileNames) { File file = new File(inputFileName); if (file.isDirectory()) { String dirPath = file.getAbsolutePath(); for (String fileInDir: file.list()){ fileNames.add(dirPath + File.separator + fileInDir); } } else { fileNames.add(inputFileName); } } for (String fileName : fileNames) { System.out.println("Processing " + fileName); RandomAccessFile raf = openInputFile(fileName); DexData dexData = new DexData(raf); dexData.load(); DexMethodCounts.generate( dexData, includeClasses, packageFilter, maxDepth, filter); raf.close(); } System.out.println("Overall method count: " + DexMethodCounts.overallCount); } catch (UsageException ue) { usage(); System.exit(2); } catch (IOException ioe) { if (ioe.getMessage() != null) { System.err.println("Failed: " + ioe); } System.exit(1); } catch (DexDataException dde) { /* a message was already reported, just bail quietly */ System.exit(1); } } /** * Opens an input file, which could be a .dex or a .jar/.apk with a * classes.dex inside. If the latter, we extract the contents to a * temporary file. * * @param fileName the name of the file to open */ RandomAccessFile openInputFile(String fileName) throws IOException { RandomAccessFile raf; raf = openInputFileAsZip(fileName); if (raf == null) { File inputFile = new File(fileName); raf = new RandomAccessFile(inputFile, "r"); } return raf; } /** * Tries to open an input file as a Zip archive (jar/apk) with a * "classes.dex" inside. * * @param fileName the name of the file to open * @return a RandomAccessFile for classes.dex, or null if the input file * is not a zip archive * @throws IOException if the file isn't found, or it's a zip and * classes.dex isn't found inside */ RandomAccessFile openInputFileAsZip(String fileName) throws IOException { ZipFile zipFile; /* * Try it as a zip file. 
*/ try { zipFile = new ZipFile(fileName); } catch (FileNotFoundException fnfe) { /* not found, no point in retrying as non-zip */ System.err.println("Unable to open '" + fileName + "': " + fnfe.getMessage()); throw fnfe; } catch (ZipException ze) { /* not a zip */ return null; } /* * We know it's a zip; see if there's anything useful inside. A * failure here results in some type of IOException (of which * ZipException is a subclass). */ ZipEntry entry = zipFile.getEntry(CLASSES_DEX); if (entry == null) { System.err.println("Unable to find '" + CLASSES_DEX + "' in '" + fileName + "'"); zipFile.close(); throw new ZipException(); } InputStream zis = zipFile.getInputStream(entry); /* * Create a temp file to hold the DEX data, open it, and delete it * to ensure it doesn't hang around if we fail. */ File tempFile = File.createTempFile("dexdeps", ".dex"); //System.out.println("+++ using temp " + tempFile); RandomAccessFile raf = new RandomAccessFile(tempFile, "rw"); tempFile.delete(); /* * Copy all data from input stream to output file. */ byte copyBuf[] = new byte[32768]; int actual; while (true) { actual = zis.read(copyBuf); if (actual == -1) break; raf.write(copyBuf, 0, actual); } zis.close(); raf.seek(0); return raf; } void parseArgs(String[] args) { int idx; for (idx = 0; idx < args.length; idx++) { String arg = args[idx]; if (arg.equals("--") || !arg.startsWith("--")) { break; } else if (arg.equals("--include-classes")) { includeClasses = true; } else if (arg.startsWith("--package-filter=")) { packageFilter = arg.substring(arg.indexOf('=') + 1); } else if (arg.startsWith("--max-depth=")) { maxDepth = Integer.parseInt(arg.substring(arg.indexOf('=') + 1)); } else if (arg.startsWith("--filter=")) { filter = Enum.valueOf( DexMethodCounts.Filter.class, arg.substring(arg.indexOf('=') + 1).toUpperCase()); } else { System.err.println("Unknown option '" + arg + "'"); throw new UsageException(); } } // We expect at least one more argument (file name). int fileCount = args.length - idx; if (fileCount == 0) { throw new UsageException(); } inputFileNames = new String[fileCount]; System.arraycopy(args, idx, inputFileNames, 0, fileCount); } void usage() { System.err.print( "DEX per-package/class method counts v1.0\n" + "Usage: dex-method-counts [options] <file.{dex,apk,jar,directory}> ...\n" + "Options:\n" + " --include-classes\n" + " --package-filter=com.foo.bar\n" + " --max-depth=N\n" ); } private static class UsageException extends RuntimeException {} }
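As a usage illustration only (not part of this record's change, which only touches whitespace): the options accepted by parseArgs above can be exercised programmatically as sketched below. The APK file name and package filter are hypothetical.

public class DexMethodCountsInvocationSketch {
    public static void main(String[] args) {
        // Mirrors the usage() text above; equivalent to running the tool from the command line.
        info.persistent.dex.Main.main(new String[] {
            "--include-classes",
            "--package-filter=com.example",
            "--max-depth=3",
            "app-release.apk" // hypothetical input file
        });
    }
}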
src/info/persistent/dex/Main.java
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package info.persistent.dex; import com.android.dexdeps.DexData; import com.android.dexdeps.DexDataException; import java.io.File; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; import java.io.RandomAccessFile; import java.util.ArrayList; import java.util.List; import java.util.zip.ZipEntry; import java.util.zip.ZipException; import java.util.zip.ZipFile; public class Main { private static final String CLASSES_DEX = "classes.dex"; private boolean includeClasses; private String packageFilter; private int maxDepth = Integer.MAX_VALUE; private DexMethodCounts.Filter filter = DexMethodCounts.Filter.ALL; private String[] inputFileNames; /** * Entry point. */ public static void main(String[] args) { Main main = new Main(); main.run(args); } /** * Start things up. */ void run(String[] args) { try { parseArgs(args); List<String> fileNames = new ArrayList<String>(); for (String inputFileName : inputFileNames) { File file = new File(inputFileName); if (file.isDirectory()) { String dirPath = file.getAbsolutePath(); for (String fileInDir: file.list()){ fileNames.add(dirPath + File.separator + fileInDir); } } else { fileNames.add(inputFileName); } } for (String fileName : fileNames) { System.out.println("Processing " + fileName); RandomAccessFile raf = openInputFile(fileName); DexData dexData = new DexData(raf); dexData.load(); DexMethodCounts.generate( dexData, includeClasses, packageFilter, maxDepth, filter); raf.close(); } System.out.println("Overall method count: " + DexMethodCounts.overallCount); } catch (UsageException ue) { usage(); System.exit(2); } catch (IOException ioe) { if (ioe.getMessage() != null) { System.err.println("Failed: " + ioe); } System.exit(1); } catch (DexDataException dde) { /* a message was already reported, just bail quietly */ System.exit(1); } } /** * Opens an input file, which could be a .dex or a .jar/.apk with a * classes.dex inside. If the latter, we extract the contents to a * temporary file. * * @param fileName the name of the file to open */ RandomAccessFile openInputFile(String fileName) throws IOException { RandomAccessFile raf; raf = openInputFileAsZip(fileName); if (raf == null) { File inputFile = new File(fileName); raf = new RandomAccessFile(inputFile, "r"); } return raf; } /** * Tries to open an input file as a Zip archive (jar/apk) with a * "classes.dex" inside. * * @param fileName the name of the file to open * @return a RandomAccessFile for classes.dex, or null if the input file * is not a zip archive * @throws IOException if the file isn't found, or it's a zip and * classes.dex isn't found inside */ RandomAccessFile openInputFileAsZip(String fileName) throws IOException { ZipFile zipFile; /* * Try it as a zip file. 
*/ try { zipFile = new ZipFile(fileName); } catch (FileNotFoundException fnfe) { /* not found, no point in retrying as non-zip */ System.err.println("Unable to open '" + fileName + "': " + fnfe.getMessage()); throw fnfe; } catch (ZipException ze) { /* not a zip */ return null; } /* * We know it's a zip; see if there's anything useful inside. A * failure here results in some type of IOException (of which * ZipException is a subclass). */ ZipEntry entry = zipFile.getEntry(CLASSES_DEX); if (entry == null) { System.err.println("Unable to find '" + CLASSES_DEX + "' in '" + fileName + "'"); zipFile.close(); throw new ZipException(); } InputStream zis = zipFile.getInputStream(entry); /* * Create a temp file to hold the DEX data, open it, and delete it * to ensure it doesn't hang around if we fail. */ File tempFile = File.createTempFile("dexdeps", ".dex"); //System.out.println("+++ using temp " + tempFile); RandomAccessFile raf = new RandomAccessFile(tempFile, "rw"); tempFile.delete(); /* * Copy all data from input stream to output file. */ byte copyBuf[] = new byte[32768]; int actual; while (true) { actual = zis.read(copyBuf); if (actual == -1) break; raf.write(copyBuf, 0, actual); } zis.close(); raf.seek(0); return raf; } void parseArgs(String[] args) { int idx; for (idx = 0; idx < args.length; idx++) { String arg = args[idx]; if (arg.equals("--") || !arg.startsWith("--")) { break; } else if (arg.equals("--include-classes")) { includeClasses = true; } else if (arg.startsWith("--package-filter=")) { packageFilter = arg.substring(arg.indexOf('=') + 1); } else if (arg.startsWith("--max-depth=")) { maxDepth = Integer.parseInt(arg.substring(arg.indexOf('=') + 1)); } else if (arg.startsWith("--filter=")) { filter = Enum.valueOf( DexMethodCounts.Filter.class, arg.substring(arg.indexOf('=') + 1).toUpperCase()); } else { System.err.println("Unknown option '" + arg + "'"); throw new UsageException(); } } // We expect at least one more argument (file name). int fileCount = args.length - idx; if (fileCount == 0) { throw new UsageException(); } inputFileNames = new String[fileCount]; System.arraycopy(args, idx, inputFileNames, 0, fileCount); } void usage() { System.err.print( "DEX per-package/class method counts v1.0\n" + "Usage: dex-method-counts [options] <file.{dex,apk,jar,directory}> ...\n" + "Options:\n" + " --include-classes\n" + " --package-filter=com.foo.bar\n" + " --max-depth=N\n" ); } private static class UsageException extends RuntimeException {} }
whitespaces returned to original state
src/info/persistent/dex/Main.java
whitespaces returned to original state
<ide><path>rc/info/persistent/dex/Main.java <ide> * <ide> * @param fileName the name of the file to open <ide> * @return a RandomAccessFile for classes.dex, or null if the input file <del> * is not a zip archive <add> * is not a zip archive <ide> * @throws IOException if the file isn't found, or it's a zip and <ide> * classes.dex isn't found inside <ide> */ <ide> <ide> void usage() { <ide> System.err.print( <del> "DEX per-package/class method counts v1.0\n" + <del> "Usage: dex-method-counts [options] <file.{dex,apk,jar,directory}> ...\n" + <del> "Options:\n" + <del> " --include-classes\n" + <del> " --package-filter=com.foo.bar\n" + <del> " --max-depth=N\n" <add> "DEX per-package/class method counts v1.0\n" + <add> "Usage: dex-method-counts [options] <file.{dex,apk,jar,directory}> ...\n" + <add> "Options:\n" + <add> " --include-classes\n" + <add> " --package-filter=com.foo.bar\n" + <add> " --max-depth=N\n" <ide> ); <ide> } <ide>
Java
apache-2.0
9ab9e89ebd3ed5083d0a83a870ea9038060ef287
0
keepacom/api_backend
package com.keepa.api.backend.helper; import static com.keepa.api.backend.structs.Product.CsvType; /** * Provides methods to work on the Keepa price history CSV format. */ class ProductAnalyzer { /** * finds the extreme point in the specified interval * * @param csv value/price history csv * @param start start of the interval (keepa time minutes), can be 0. * @param end end of the interval (keepa time minutes), can be in the future (Integer.MAX_VALUE). * @param isMinimum whether to find the minimum or maximum * @return extremePoint (value/price) in the given interval or -1 if no extreme point was found. * @deprecated use {@link ProductAnalyzer#getExtremePointInInterval(int[], int, int, boolean, CsvType)} instead. */ public static int getExtremePointInInterval(int[] csv, int start, int end, boolean isMinimum) { if (csv == null || csv.length < 4 || csv[csv.length - 1] == -1 || csv[csv.length - 3] == -1) return -1; int extremeValue = -1; if (isMinimum) extremeValue = Integer.MAX_VALUE; for (int i = 0; i < csv.length; i += 2) { int date = csv[i]; if (date <= start) continue; if (date >= end) break; if (csv[i + 1] == -1) continue; if (isMinimum) extremeValue = Math.min(extremeValue, csv[i + 1]); else extremeValue = Math.max(extremeValue, csv[i + 1]); } if (extremeValue == Integer.MAX_VALUE) return -1; return extremeValue; } /** * finds the extreme point in the specified interval * * @param csv value/price history csv * @param start start of the interval (keepa time minutes), can be 0. * @param end end of the interval (keepa time minutes), can be in the future (Integer.MAX_VALUE). * @param isMinimum whether to find the minimum or maximum * @param type the type of the csv data. If the csv includes shipping costs the extreme point will be the landing price (price + shipping). * @return extremePoint (value/price)) in the given interval or -1 if no extreme point was found. If the csv includes shipping costs it will be the landing price (price + shipping). * @deprecated use {@link ProductAnalyzer#getExtremePointsInIntervalWithTime(int[], int, int, CsvType)} instead. */ public static int getExtremePointInInterval(int[] csv, int start, int end, boolean isMinimum, CsvType type) { int[] minMax = getExtremePointsInIntervalWithTime(csv, start, end, type); return minMax[isMinimum ? 1 : 3]; } /** * finds the extreme point in the specified interval * * @param csv value/price history csv * @param start start of the interval (keepa time minutes), can be 0. * @param end end of the interval (keepa time minutes), can be in the future (Integer.MAX_VALUE). * @param type the type of the csv data. If the csv includes shipping costs the extreme point will be the landing price (price + shipping). * @return extremePoints (time, lowest value/price, time, highest value/price) in the given interval or -1 if no extreme point was found. If the csv includes shipping costs it will be the landing price (price + shipping). */ public static int[] getExtremePointsInIntervalWithTime(int[] csv, int start, int end, CsvType type) { if (csv == null || start >= end || csv.length < (type.isWithShipping ? 6 : 4)) return new int[]{-1, -1, -1, -1}; int[] extremeValue = new int[]{-1, Integer.MAX_VALUE, -1, -1}; int lastTime = getLastTime(csv, type); int firstTime = csv[0]; if (lastTime == -1 || firstTime == -1 || firstTime > end) return new int[]{-1, -1, -1, -1}; if (firstTime > start) start = firstTime; int loopIncrement = (type.isWithShipping ? 3 : 2); int adjustedIndex = type.isWithShipping ? 
2 : 1; for (int i = 1, j = csv.length; i < j; i += loopIncrement) { int c = csv[i]; int date = csv[i - 1]; if (date >= end) break; if (c != -1) { if (type.isWithShipping) { int s = csv[i + 1]; c += s < 0 ? 0 : s; } if (date >= start) { if (c < extremeValue[1]) { extremeValue[1] = c; extremeValue[0] = csv[i - 1]; } if (c > extremeValue[3]) { extremeValue[3] = c; extremeValue[2] = csv[i - 1]; } } else { boolean isValid = false; if (i == j - adjustedIndex) { isValid = true; } else { int nextDate = csv[i + adjustedIndex]; if (nextDate >= end || (nextDate >= start)) isValid = true; } if (isValid) { if (c < extremeValue[1]) { extremeValue[1] = c; extremeValue[0] = start; } if (c > extremeValue[3]) { extremeValue[3] = c; extremeValue[2] = start; } } } } } if (extremeValue[1] == Integer.MAX_VALUE) return new int[]{-1, -1, -1, -1}; return extremeValue; } /** * Get the last value/price change. * * @param csv value/price history csv * @return the last value/price change delta * @deprecated use {@link ProductAnalyzer#getDeltaLast(int[], CsvType)} instead. */ public static int getDeltaLast(int[] csv) { if (csv == null || csv.length < 4 || csv[csv.length - 1] == -1 || csv[csv.length - 3] == -1) return 0; return csv[csv.length - 1] - csv[csv.length - 3]; } /** * Get the last value/price change. * * @param csv value/price history csv * @param type the type of the csv data. If the csv includes shipping costs the extreme point will be the landing price (price + shipping). * @return the last value/price change delta. If the csv includes shipping costs it will be the delta of the the landing prices (price + shipping). */ private static int getDeltaLast(int[] csv, CsvType type) { if (type.isWithShipping) { if (csv == null || csv.length < 6 || csv[csv.length - 1] == -1 || csv[csv.length - 5] == -1) return 0; int v = csv[csv.length - 5]; int s = csv[csv.length - 4]; int totalLast = v < 0 ? v : v + (s < 0 ? 0 : s); v = csv[csv.length - 2]; s = csv[csv.length - 1]; int totalCurrent = v < 0 ? v : v + (s < 0 ? 0 : s); return totalCurrent - totalLast; } else { if (csv == null || csv.length < 4 || csv[csv.length - 1] == -1 || csv[csv.length - 3] == -1) return 0; return csv[csv.length - 1] - csv[csv.length - 3]; } } /** * Get the last value/price. * * @param csv value/price history csv * @return the last value/price * @deprecated use {@link ProductAnalyzer#getLast(int[], CsvType)} instead. */ private static int getLast(int[] csv) { return csv == null || csv.length == 0 ? -1 : csv[csv.length - 1]; } /** * Get the last value/price. * * @param csv value/price history csv * @param type the type of the csv data. * @return the last value/price. If the csv includes shipping costs it will be the landing price (price + shipping). */ public static int getLast(int[] csv, CsvType type) { if (csv == null || csv.length == 0) return -1; if (type.isWithShipping) { int s = csv[csv.length - 1]; int v = csv[csv.length - 2]; return v < 0 ? v : v + (s < 0 ? 0 : s); } return csv[csv.length - 1]; } /** * Get the time (keepa time minutes) of the last entry. This does not correspond to the last update time, but to the last time we registered a price/value change. * * @param csv value/price history csv * @param type the type of the csv data. * @return keepa time minutes of the last entry */ public static int getLastTime(int[] csv, CsvType type) { return csv == null || csv.length == 0 ? -1 : csv[csv.length - (type.isWithShipping ? 
3 : 2)]; } /** * Get the value/price at the specified time * * @param csv value/price history csv * @param time value/price lookup time (keepa time minutes) * @return the price/value of the product at the specified time. -1 if no value was found or if the product was out of stock. * @deprecated use {@link ProductAnalyzer#getValueAtTime(int[], int, CsvType)} instead. */ public static int getValueAtTime(int[] csv, int time) { if (csv == null || csv.length == 0) return -1; int i = 0; for (; i < csv.length; i += 2) { if (csv[i] > time) break; } if (i > csv.length) return getLast(csv); if (i < 2) return -1; return csv[i - 1]; } /** * Get the value/price at the specified time * * @param csv value/price history csv * @param time value/price lookup time (keepa time minutes) * @param type the type of the csv data. * @return the price or value of the product at the specified time. -1 if no value was found or if the product was out of stock. If the csv includes shipping costs it will be the landing price (price + shipping). */ public static int getValueAtTime(int[] csv, int time, CsvType type) { if (csv == null || csv.length == 0) return -1; int i = 0; int loopIncrement = (type.isWithShipping ? 3 : 2); for (; i < csv.length; i += loopIncrement) if (csv[i] > time) break; if (i > csv.length) return getLast(csv, type); if (i < loopIncrement) return -1; if (type.isWithShipping) { int v = csv[i - 2]; int s = csv[i - 1]; return v < 0 ? v : v + (s < 0 ? 0 : s); } return csv[i - 1]; } /** * Get the price and shipping cost at the specified time * * @param csv price with shipping history csv * @param time price lookup time (keepa time minutes) * @return int[price, shipping] - the price and shipping cost of the product at the specified time. [-1, -1] if no price was found or if the product was out of stock. */ public static int[] getPriceAndShippingAtTime(int[] csv, int time) { if (csv == null || csv.length == 0) return new int[]{-1, -1}; int i = 0; for (; i < csv.length; i += 3) { if (csv[i] > time) { break; } } if (i > csv.length) return getLastPriceAndShipping(csv); if (i < 3) return new int[]{-1, -1}; return new int[]{csv[i - 2], csv[i - 1]}; } /** * Get the last price and shipping cost. * * @param csv price with shipping history csv * @return int[price, shipping] - the last price and shipping cost. */ public static int[] getLastPriceAndShipping(int[] csv) { if (csv == null || csv.length < 3) return new int[]{-1, -1}; return new int[]{csv[csv.length - 2], csv[csv.length - 1]}; } /** * @param csv value/price history csv * @param time time to begin the search * @return the closest value/price found to the specified time. If the csv includes shipping costs it will be the landing price (price + shipping). * @deprecated use {@link ProductAnalyzer#getClosestValueAtTime(int[], int, CsvType)} instead. */ public static int getClosestValueAtTime(int[] csv, int time) { if (csv == null || csv.length == 0) return -1; int i = 0; for (; i < csv.length; i += 2) { if (csv[i] > time) break; } if (i > csv.length) return getLast(csv); if (i < 2) { if (csv.length < 3) return csv[1]; else i += 2; } if (csv[i - 1] != -1) return csv[i - 1]; else { for (; i < csv.length; i += 2) { if (csv[i - 1] != -1) break; } if (i > csv.length) return getLast(csv); if (i < 2) return -1; return csv[i - 1]; } } /** * @param csv value/price history csv * @param time time to begin the search * @param type the type of the csv data. * @return the closest value/price found to the specified time. 
If the csv includes shipping costs it will be the landing price (price + shipping). */ public static int getClosestValueAtTime(int[] csv, int time, CsvType type) { if (csv == null || csv.length == 0) return -1; int i = 0; int loopIncrement = (type.isWithShipping ? 3 : 2); for (; i < csv.length; i += loopIncrement) if (csv[i] > time) break; if (i > csv.length) return getLast(csv, type); if (i < loopIncrement) { if (type.isWithShipping) { if (csv.length < 4) { int v = csv[2]; int s = csv[1]; return v < 0 ? v : v + (s < 0 ? 0 : s); } else i += 3; } else { if (csv.length < 3) return csv[1]; else i += 2; } } if (type.isWithShipping) { if (csv[i - 2] != -1) { int v = csv[i - 2]; int s = csv[i - 1]; return v < 0 ? v : v + (s < 0 ? 0 : s); } else { for (; i < csv.length; i += loopIncrement) { if (csv[i - 2] != -1) break; } if (i > csv.length) return getLast(csv, type); if (i < 3) return -1; int v = csv[i - 2]; int s = csv[i - 1]; return v < 0 ? v : v + (s < 0 ? 0 : s); } } else { if (csv[i - 1] != -1) return csv[i - 1]; else { for (; i < csv.length; i += 2) { if (csv[i - 1] != -1) break; } if (i > csv.length) return getLast(csv, type); if (i < 2) return -1; return csv[i - 1]; } } } /** * finds the lowest and highest value/price of the csv history * * @param csv value/price history csv * @return [0] = low, [1] = high * @deprecated use {@link ProductAnalyzer#getLowestAndHighest(int[], CsvType)} instead. */ public static int[] getLowestAndHighest(int[] csv) { if (csv == null || csv.length < 6) { return new int[]{-1, -1}; } int[] lowHigh = new int[]{Integer.MAX_VALUE, -1}; for (int i = 0, k = csv.length; i < k; i = i + 2) { int v = csv[i + 1]; if (v == -1) continue; if (v < lowHigh[0]) lowHigh[0] = v; if (v > lowHigh[1]) lowHigh[1] = v; } if (lowHigh[0] == Integer.MAX_VALUE) lowHigh[0] = -1; return lowHigh; } /** * finds the lowest and highest value/price of the csv history * * @param csv value/price history csv * @param type the type of the csv data. * @return [0] = low, [1] = high. If the csv includes shipping costs the extreme point will be the landing price (price + shipping). [-1, -1] if insufficient data. */ public static int[] getLowestAndHighest(int[] csv, CsvType type) { int[] minMax = getExtremePointsInIntervalWithTime(csv, 0, Integer.MAX_VALUE, type); return new int[]{minMax[1], minMax[3]}; } /** * finds the lowest and highest value/price of the csv history including the dates of the occurrences (in keepa time minutes). * * @param csv value/price history csv * @param type the type of the csv data. * @return [0] = low time, [1] = low, [2] = high time, [3] = high. If the csv includes shipping costs the extreme point will be the landing price (price + shipping). [-1, -1, -1, -1] if insufficient data. */ public static int[] getLowestAndHighestWithTime(int[] csv, CsvType type) { return getExtremePointsInIntervalWithTime(csv, 0, Integer.MAX_VALUE, type); } /** * Returns a weighted mean of the products csv history in the last X days * * @param csv value/price history csv * @param days number of days the weighted mean will be calculated for (e.g. 90 days, 60 days, 30 days) * @return the weighted mean or -1 if insufficient history csv length (less than a day) * @deprecated use {@link ProductAnalyzer#calcWeightedMean(int[], int, double, CsvType)} instead. 
*/ public static int calcWeightedMean(int[] csv, double days) { int avg = -1; int now = KeepaTime.nowMinutes(); if (csv == null || csv.length == 0) { return avg; } int size = csv.length; int duration = (csv[size - 2] - csv[0]) / 60; double count = 0; if (size < 4 || duration < 24) return avg; if (duration < 24 * days) days = Math.floor(duration / 24.0); for (int i = 1; i < size; i = i + 2) { int c = csv[i]; if (c != -1) { if (now - csv[i - 1] < days * 24 * 60) { if (i == 1) { continue; } if (avg == -1) { if (csv[i - 2] == -1) { avg = 0; } else { double tmpCount = (days * 24 * 60 - (now - csv[i - 1])) / (24 * 60.0); count = tmpCount; avg = (int) Math.floor(csv[i - 2] * tmpCount); } } if (i + 1 == size) { if (csv[i - 2] == -1) { continue; } double tmpCount = ((now - csv[size - 2]) / (24.0 * 60.0)); count += tmpCount; avg += c * tmpCount; } else { double tmpCount = ((csv[i + 1] - csv[i - 1]) / (24.0 * 60.0)); count += tmpCount; avg += c * tmpCount; } } else { if (i == size - 1 && csv[i] != -1) { count = 1; avg = csv[i]; } } } } if (avg != -1) { avg = (int) Math.floor(avg / count); } return avg; } /** * Returns a weighted mean of the products csv history in the last X days * * @param csv value/price history csv * @param now current keepa time minutes * @param days number of days the weighted mean will be calculated for (e.g. 90 days, 60 days, 30 days) * @param type the type of the csv data. * @return the weighted mean or -1 if insufficient history csv length (less than a day). If the csv includes shipping costs it will be the wieghted mean of the landing price (price + shipping). */ public static int calcWeightedMean(int[] csv, int now, double days, CsvType type) { int avg = -1; if (csv == null || csv.length == 0) return avg; int size = csv.length; int loopIncrement = (type.isWithShipping ? 3 : 2); int duration = (csv[size - loopIncrement] - csv[0]) / 60; double count = 0; if (size < 4 || duration < 24 * 7) return avg; if (duration < 24 * days) days = Math.floor(duration / 24.0); int adjustedIndex = type.isWithShipping ? 2 : 1; for (int i = 1, j = size; i < j; i = i + loopIncrement) { int c = csv[i]; if (c != -1) { if (type.isWithShipping) { int s = csv[i + 1]; c += s < 0 ? 0 : s; } if (now - csv[i - 1] < days * 24 * 60) { if (i == 1) { continue; } if (avg == -1) { if (csv[i - loopIncrement] == -1) { avg = 0; } else { double tmpCount = (days * 24 * 60 - (now - csv[i - 1])) / (24 * 60.0); count = tmpCount; int price = csv[i - loopIncrement]; if (type.isWithShipping) { int s = csv[i - 2]; price += s < 0 ? 0 : s; } avg = (int) Math.floor(price * tmpCount); } } if (i + adjustedIndex == j) { if (csv[i - loopIncrement] == -1) { continue; } double tmpCount = ((now - csv[j - loopIncrement]) / (24.0 * 60.0)); count += tmpCount; avg += c * tmpCount; } else { double tmpCount = ((csv[i + adjustedIndex] - csv[i - 1]) / (24.0 * 60.0)); count += tmpCount; avg += c * tmpCount; } } else { if (i == j - adjustedIndex && csv[i] != -1) { count = 1; avg = c; } } } } if (avg != -1) { if (count != 0) avg = (int) Math.floor(avg / count); else avg = -1; } return avg; } /** * Returns true if the CSV was out of stock in the given period. * * @param csv value/price history csv * @param start start of the interval (keepa time minutes), can be 0. * @param end end of the interval (keepa time minutes), can be in the future (Integer.MAX_VALUE). * @param type the type of the csv data. * @return was out of stock in interval, null if the csv is too short to tell. 
*/ public static Boolean getOutOfStockInInterval(int[] csv, int start, int end, CsvType type) { if (type.isWithShipping) { if (csv == null || csv.length < 6) return null; } else if (start >= end || csv == null || csv.length < 4) return null; int loopIncrement = (type.isWithShipping ? 3 : 2); for (int i = 0; i < csv.length; i += loopIncrement) { int date = csv[i]; if (date <= start) continue; if (date >= end) break; if (csv[i + 1] == -1) return true; } return false; } /** * Returns a the percentage of time in the given interval the price type was out of stock * * @param csv value/price history csv * @param now current keepa time minutes * @param start start of the interval (keepa time minutes), can be 0. * @param end end of the interval (keepa time minutes), can be in the future (Integer.MAX_VALUE). * @param type the type of the csv data. * @param trackingSince the product object's trackingSince value * @return percentage between 0 and 100 or -1 if insufficient data. 100 = 100% out of stock in the interval. */ public static int getOutOfStockPercentageInInterval(int[] csv, int now, int start, int end, CsvType type, int trackingSince) { if (!type.isPrice) return -1; if (start >= end) return -1; if (csv == null || csv.length == 0) return -1; int size = csv.length; int loopIncrement = (type.isWithShipping ? 3 : 2); int lastTime = getLastTime(csv, type); int firstTime = csv[0]; if (lastTime == -1 || firstTime == -1 || firstTime > end || trackingSince > end) return -1; long count = 0; if (trackingSince > start) start = trackingSince; if (end > now) end = now; int adjustedIndex = type.isWithShipping ? 2 : 1; for (int i = 1, j = size; i < j; i += loopIncrement) { int c = csv[i]; int date = csv[i - 1]; if (date >= end) break; if (c != -1) { if (date >= start) { if (i == 1) { if (i + adjustedIndex == j) { return 0; } } int nextDate; if (i + adjustedIndex == j) { nextDate = now; } else { nextDate = csv[i + adjustedIndex]; if (nextDate > end) nextDate = end; } long tmpCount = nextDate - date; count += tmpCount; } else { if (i == j - adjustedIndex) { return 0; } else { int nextDate = csv[i + adjustedIndex]; if (nextDate >= end) return 0; if (nextDate >= start) count = nextDate - start; } } } } if (count > 0) count = 100 - (int) Math.floor((count * 100) / (end - start)); else if (count == 0) { count = 100; } return (int) count; } }
src/main/java/com/keepa/api/backend/helper/ProductAnalyzer.java
package com.keepa.api.backend.helper; import static com.keepa.api.backend.structs.Product.CsvType; /** * Provides methods to work on the Keepa price history CSV format. */ class ProductAnalyzer { /** * finds the extreme point in the specified interval * * @param csv value/price history csv * @param start start of the interval (keepa time minutes), can be 0. * @param end end of the interval (keepa time minutes), can be in the future (Integer.MAX_VALUE). * @param isMinimum whether to find the minimum or maximum * @return extremePoint (value/price) in the given interval or -1 if no extreme point was found. * @deprecated use {@link ProductAnalyzer#getExtremePointInInterval(int[], int, int, boolean, CsvType)} instead. */ public static int getExtremePointInInterval(int[] csv, int start, int end, boolean isMinimum) { if (csv == null || csv.length < 4 || csv[csv.length - 1] == -1 || csv[csv.length - 3] == -1) return -1; int extremeValue = -1; if (isMinimum) extremeValue = Integer.MAX_VALUE; for (int i = 0; i < csv.length; i += 2) { int date = csv[i]; if (date <= start) continue; if (date >= end) break; if (csv[i + 1] == -1) continue; if (isMinimum) extremeValue = Math.min(extremeValue, csv[i + 1]); else extremeValue = Math.max(extremeValue, csv[i + 1]); } if (extremeValue == Integer.MAX_VALUE) return -1; return extremeValue; } /** * finds the extreme point in the specified interval * * @param csv value/price history csv * @param start start of the interval (keepa time minutes), can be 0. * @param end end of the interval (keepa time minutes), can be in the future (Integer.MAX_VALUE). * @param isMinimum whether to find the minimum or maximum * @param type the type of the csv data. If the csv includes shipping costs the extreme point will be the landing price (price + shipping). * @return extremePoint (value/price)) in the given interval or -1 if no extreme point was found. If the csv includes shipping costs it will be the landing price (price + shipping). * @deprecated use {@link ProductAnalyzer#getExtremePointsInIntervalWithTime(int[], int, int, CsvType)} instead. */ public static int getExtremePointInInterval(int[] csv, int start, int end, boolean isMinimum, CsvType type) { int[] minMax = getExtremePointsInIntervalWithTime(csv, start, end, type); return minMax[isMinimum ? 1 : 3]; } /** * finds the extreme point in the specified interval * * @param csv value/price history csv * @param start start of the interval (keepa time minutes), can be 0. * @param end end of the interval (keepa time minutes), can be in the future (Integer.MAX_VALUE). * @param type the type of the csv data. If the csv includes shipping costs the extreme point will be the landing price (price + shipping). * @return extremePoints (time, lowest value/price, time, highest value/price) in the given interval or -1 if no extreme point was found. If the csv includes shipping costs it will be the landing price (price + shipping). */ public static int[] getExtremePointsInIntervalWithTime(int[] csv, int start, int end, CsvType type) { if (csv == null || start >= end || csv.length < (type.isWithShipping ? 6 : 4)) return new int[]{-1, -1, -1, -1}; int[] extremeValue = new int[]{-1, Integer.MAX_VALUE, -1, -1}; int lastTime = getLastTime(csv, type); int firstTime = csv[0]; if (lastTime == -1 || firstTime == -1 || firstTime > end) return new int[]{-1, -1, -1, -1}; if (firstTime > start) start = firstTime; int loopIncrement = (type.isWithShipping ? 3 : 2); int adjustedIndex = type.isWithShipping ? 
2 : 1; for (int i = 1, j = csv.length; i < j; i += loopIncrement) { int c = csv[i]; int date = csv[i - 1]; if (date >= end) break; if (c != -1) { if (type.isWithShipping) { int s = csv[i + 1]; c += s < 0 ? 0 : s; } if (date >= start) { if (c < extremeValue[1]) { extremeValue[1] = c; extremeValue[0] = csv[i - 1]; } if (c > extremeValue[3]) { extremeValue[3] = c; extremeValue[2] = csv[i - 1]; } } else { boolean isValid = false; if (i == j - adjustedIndex) { isValid = true; } else { int nextDate = csv[i + adjustedIndex]; if (nextDate >= end || (nextDate >= start)) isValid = true; } if (isValid) { if (c < extremeValue[1]) { extremeValue[1] = c; extremeValue[0] = start; } if (c > extremeValue[3]) { extremeValue[3] = c; extremeValue[2] = start; } } } } } if (extremeValue[1] == Integer.MAX_VALUE) return new int[]{-1, -1, -1, -1}; return extremeValue; } /** * Get the last value/price change. * * @param csv value/price history csv * @return the last value/price change delta * @deprecated use {@link ProductAnalyzer#getDeltaLast(int[], CsvType)} instead. */ public static int getDeltaLast(int[] csv) { if (csv == null || csv.length < 4 || csv[csv.length - 1] == -1 || csv[csv.length - 3] == -1) return 0; return csv[csv.length - 1] - csv[csv.length - 3]; } /** * Get the last value/price change. * * @param csv value/price history csv * @param type the type of the csv data. If the csv includes shipping costs the extreme point will be the landing price (price + shipping). * @return the last value/price change delta. If the csv includes shipping costs it will be the delta of the the landing prices (price + shipping). */ private static int getDeltaLast(int[] csv, CsvType type) { if (type.isWithShipping) { if (csv == null || csv.length < 6 || csv[csv.length - 1] == -1 || csv[csv.length - 5] == -1) return 0; int v = csv[csv.length - 5]; int s = csv[csv.length - 4]; int totalLast = v < 0 ? v : v + (s < 0 ? 0 : s); v = csv[csv.length - 2]; s = csv[csv.length - 1]; int totalCurrent = v < 0 ? v : v + (s < 0 ? 0 : s); return totalCurrent - totalLast; } else { if (csv == null || csv.length < 4 || csv[csv.length - 1] == -1 || csv[csv.length - 3] == -1) return 0; return csv[csv.length - 1] - csv[csv.length - 3]; } } /** * Get the last value/price. * * @param csv value/price history csv * @return the last value/price * @deprecated use {@link ProductAnalyzer#getLast(int[], CsvType)} instead. */ private static int getLast(int[] csv) { return csv == null || csv.length == 0 ? -1 : csv[csv.length - 1]; } /** * Get the last value/price. * * @param csv value/price history csv * @param type the type of the csv data. * @return the last value/price. If the csv includes shipping costs it will be the landing price (price + shipping). */ public static int getLast(int[] csv, CsvType type) { if (csv == null || csv.length == 0) return -1; if (type.isWithShipping) { int s = csv[csv.length - 1]; int v = csv[csv.length - 2]; return v < 0 ? v : v + (s < 0 ? 0 : s); } return csv[csv.length - 1]; } /** * Get the time (keepa time minutes) of the last entry. This does not correspond to the last update time, but to the last time we registered a price/value change. * * @param csv value/price history csv * @param type the type of the csv data. * @return keepa time minutes of the last entry */ public static int getLastTime(int[] csv, CsvType type) { return csv == null || csv.length == 0 ? -1 : csv[csv.length - (type.isWithShipping ? 
3 : 2)]; } /** * Get the value/price at the specified time * * @param csv value/price history csv * @param time value/price lookup time (keepa time minutes) * @return the price/value of the product at the specified time. -1 if no value was found or if the product was out of stock. * @deprecated use {@link ProductAnalyzer#getValueAtTime(int[], int, CsvType)} instead. */ public static int getValueAtTime(int[] csv, int time) { if (csv == null || csv.length == 0) return -1; int i = 0; for (; i < csv.length; i += 2) { if (csv[i] > time) break; } if (i > csv.length) return getLast(csv); if (i < 2) return -1; return csv[i - 1]; } /** * Get the value/price at the specified time * * @param csv value/price history csv * @param time value/price lookup time (keepa time minutes) * @param type the type of the csv data. * @return the price or value of the product at the specified time. -1 if no value was found or if the product was out of stock. If the csv includes shipping costs it will be the landing price (price + shipping). */ public static int getValueAtTime(int[] csv, int time, CsvType type) { if (csv == null || csv.length == 0) return -1; int i = 0; int loopIncrement = (type.isWithShipping ? 3 : 2); for (; i < csv.length; i += loopIncrement) if (csv[i] > time) break; if (i > csv.length) return getLast(csv, type); if (i < loopIncrement) return -1; if (type.isWithShipping) { int v = csv[i - 2]; int s = csv[i - 1]; return v < 0 ? v : v + (s < 0 ? 0 : s); } return csv[i - 1]; } /** * Get the price and shipping cost at the specified time * * @param csv price with shipping history csv * @param time price lookup time (keepa time minutes) * @return int[price, shipping] - the price and shipping cost of the product at the specified time. [-1, -1] if no price was found or if the product was out of stock. */ public static int[] getPriceAndShippingAtTime(int[] csv, int time) { if (csv == null || csv.length == 0) return new int[]{-1, -1}; int i = 0; for (; i < csv.length; i += 3) { if (csv[i] > time) { break; } } if (i > csv.length) return getLastPriceAndShipping(csv); if (i < 3) return new int[]{-1, -1}; return new int[]{csv[i - 2], csv[i - 1]}; } /** * Get the last price and shipping cost. * * @param csv price with shipping history csv * @return int[price, shipping] - the last price and shipping cost. */ public static int[] getLastPriceAndShipping(int[] csv) { if (csv == null || csv.length < 3) return new int[]{-1, -1}; return new int[]{csv[csv.length - 2], csv[csv.length - 1]}; } /** * @param csv value/price history csv * @param time time to begin the search * @return the closest value/price found to the specified time. If the csv includes shipping costs it will be the landing price (price + shipping). * @deprecated use {@link ProductAnalyzer#getClosestValueAtTime(int[], int, CsvType)} instead. */ public static int getClosestValueAtTime(int[] csv, int time) { if (csv == null || csv.length == 0) return -1; int i = 0; for (; i < csv.length; i += 2) { if (csv[i] > time) break; } if (i > csv.length) return getLast(csv); if (i < 2) { if (csv.length < 3) return csv[1]; else i += 2; } if (csv[i - 1] != -1) return csv[i - 1]; else { for (; i < csv.length; i += 2) { if (csv[i - 1] != -1) break; } if (i > csv.length) return getLast(csv); if (i < 2) return -1; return csv[i - 1]; } } /** * @param csv value/price history csv * @param time time to begin the search * @param type the type of the csv data. * @return the closest value/price found to the specified time. 
If the csv includes shipping costs it will be the landing price (price + shipping). */ public static int getClosestValueAtTime(int[] csv, int time, CsvType type) { if (csv == null || csv.length == 0) return -1; int i = 0; int loopIncrement = (type.isWithShipping ? 3 : 2); for (; i < csv.length; i += loopIncrement) if (csv[i] > time) break; if (i > csv.length) return getLast(csv, type); if (i < loopIncrement) { if (type.isWithShipping) { if (csv.length < 4) { int v = csv[2]; int s = csv[1]; return v < 0 ? v : v + (s < 0 ? 0 : s); } else i += 3; } else { if (csv.length < 3) return csv[1]; else i += 2; } } if (type.isWithShipping) { if (csv[i - 2] != -1) { int v = csv[i - 2]; int s = csv[i - 1]; return v < 0 ? v : v + (s < 0 ? 0 : s); } else { for (; i < csv.length; i += loopIncrement) { if (csv[i - 2] != -1) break; } if (i > csv.length) return getLast(csv, type); if (i < 3) return -1; int v = csv[i - 2]; int s = csv[i - 1]; return v < 0 ? v : v + (s < 0 ? 0 : s); } } else { if (csv[i - 1] != -1) return csv[i - 1]; else { for (; i < csv.length; i += 2) { if (csv[i - 1] != -1) break; } if (i > csv.length) return getLast(csv, type); if (i < 2) return -1; return csv[i - 1]; } } } /** * finds the lowest and highest value/price of the csv history * * @param csv value/price history csv * @return [0] = low, [1] = high * @deprecated use {@link ProductAnalyzer#getLowestAndHighest(int[], CsvType)} instead. */ public static int[] getLowestAndHighest(int[] csv) { if (csv == null || csv.length < 6) { return new int[]{-1, -1}; } int[] lowHigh = new int[]{Integer.MAX_VALUE, -1}; for (int i = 0, k = csv.length; i < k; i = i + 2) { int v = csv[i + 1]; if (v == -1) continue; if (v < lowHigh[0]) lowHigh[0] = v; if (v > lowHigh[1]) lowHigh[1] = v; } if (lowHigh[0] == Integer.MAX_VALUE) lowHigh[0] = -1; return lowHigh; } /** * finds the lowest and highest value/price of the csv history * * @param csv value/price history csv * @param type the type of the csv data. * @return [0] = low, [1] = high. If the csv includes shipping costs the extreme point will be the landing price (price + shipping). [-1, -1] if insufficient data. */ public static int[] getLowestAndHighest(int[] csv, CsvType type) { int[] minMax = getExtremePointsInIntervalWithTime(csv, 0, Integer.MAX_VALUE, type); return new int[]{minMax[1], minMax[3]}; } /** * finds the lowest and highest value/price of the csv history including the dates of the occurrences (in keepa time minutes). * * @param csv value/price history csv * @param type the type of the csv data. * @return [0] = low time, [1] = low, [2] = high time, [3] = high. If the csv includes shipping costs the extreme point will be the landing price (price + shipping). [-1, -1, -1, -1] if insufficient data. */ public static int[] getLowestAndHighestWithTime(int[] csv, CsvType type) { return getExtremePointsInIntervalWithTime(csv, 0, Integer.MAX_VALUE, type); } /** * Returns a weighted mean of the products csv history in the last X days * * @param csv value/price history csv * @param days number of days the weighted mean will be calculated for (e.g. 90 days, 60 days, 30 days) * @return the weighted mean or -1 if insufficient history csv length (less than a day) * @deprecated use {@link ProductAnalyzer#calcWeightedMean(int[], int, double, CsvType)} instead. 
*/ public static int calcWeightedMean(int[] csv, double days) { int avg = -1; int now = KeepaTime.nowMinutes(); if (csv == null || csv.length == 0) { return avg; } int size = csv.length; int duration = (csv[size - 2] - csv[0]) / 60; double count = 0; if (size < 4 || duration < 24) return avg; if (duration < 24 * days) days = Math.floor(duration / 24.0); for (int i = 1; i < size; i = i + 2) { int c = csv[i]; if (c != -1) { if (now - csv[i - 1] < days * 24 * 60) { if (i == 1) { continue; } if (avg == -1) { if (csv[i - 2] == -1) { avg = 0; } else { double tmpCount = (days * 24 * 60 - (now - csv[i - 1])) / (24 * 60.0); count = tmpCount; avg = (int) Math.floor(csv[i - 2] * tmpCount); } } if (i + 1 == size) { if (csv[i - 2] == -1) { continue; } double tmpCount = ((now - csv[size - 2]) / (24.0 * 60.0)); count += tmpCount; avg += c * tmpCount; } else { double tmpCount = ((csv[i + 1] - csv[i - 1]) / (24.0 * 60.0)); count += tmpCount; avg += c * tmpCount; } } else { if (i == size - 1 && csv[i] != -1) { count = 1; avg = csv[i]; } } } } if (avg != -1) { avg = (int) Math.floor(avg / count); } return avg; } /** * Returns a weighted mean of the products csv history in the last X days * * @param csv value/price history csv * @param days number of days the weighted mean will be calculated for (e.g. 90 days, 60 days, 30 days) * @return the weighted mean or -1 if insufficient history csv length (less than a day). If the csv includes shipping costs it will be the wieghted mean of the landing price (price + shipping). */ public static int calcWeightedMean(int[] csv, int now, double days, CsvType type) { int avg = -1; if (csv == null || csv.length == 0) return avg; int size = csv.length; int loopIncrement = (type.isWithShipping ? 3 : 2); int duration = (csv[size - loopIncrement] - csv[0]) / 60; double count = 0; if (size < 4 || duration < 24 * 7) return avg; if (duration < 24 * days) days = Math.floor(duration / 24.0); int adjustedIndex = type.isWithShipping ? 2 : 1; for (int i = 1, j = size; i < j; i = i + loopIncrement) { int c = csv[i]; if (c != -1) { if (type.isWithShipping) { int s = csv[i + 1]; c += s < 0 ? 0 : s; } if (now - csv[i - 1] < days * 24 * 60) { if (i == 1) { continue; } if (avg == -1) { if (csv[i - loopIncrement] == -1) { avg = 0; } else { double tmpCount = (days * 24 * 60 - (now - csv[i - 1])) / (24 * 60.0); count = tmpCount; int price = csv[i - loopIncrement]; if (type.isWithShipping) { int s = csv[i - 2]; price += s < 0 ? 0 : s; } avg = (int) Math.floor(price * tmpCount); } } if (i + adjustedIndex == j) { if (csv[i - loopIncrement] == -1) { continue; } double tmpCount = ((now - csv[j - loopIncrement]) / (24.0 * 60.0)); count += tmpCount; avg += c * tmpCount; } else { double tmpCount = ((csv[i + adjustedIndex] - csv[i - 1]) / (24.0 * 60.0)); count += tmpCount; avg += c * tmpCount; } } else { if (i == j - adjustedIndex && csv[i] != -1) { count = 1; avg = c; } } } } if (avg != -1) { if (count != 0) avg = (int) Math.floor(avg / count); else avg = -1; } return avg; } /** * Returns true if the CSV was out of stock in the given period. * * @param csv value/price history csv * @param start start of the interval (keepa time minutes), can be 0. * @param end end of the interval (keepa time minutes), can be in the future (Integer.MAX_VALUE). * @param type the type of the csv data. * @return was out of stock in interval, null if the csv is too short to tell. 
*/ public static Boolean getOutOfStockInInterval(int[] csv, int start, int end, CsvType type) { if (type.isWithShipping) { if (csv == null || csv.length < 6) return null; } else if (start >= end || csv == null || csv.length < 4) return null; int loopIncrement = (type.isWithShipping ? 3 : 2); for (int i = 0; i < csv.length; i += loopIncrement) { int date = csv[i]; if (date <= start) continue; if (date >= end) break; if (csv[i + 1] == -1) return true; } return false; } public static int getOutOfStockPercentageInInterval(int[] v, int now, int start, int end, CsvType type, int trackingSince) { if (!type.isPrice) return -1; if (start >= end) return -1; if (v == null || v.length == 0) return -1; int size = v.length; int loopIncrement = (type.isWithShipping ? 3 : 2); int lastTime = getLastTime(v, type); int firstTime = v[0]; if (lastTime == -1 || firstTime == -1 || firstTime > end || trackingSince > end) return -1; long count = 0; if (trackingSince > start) start = trackingSince; if (end > now) end = now; int adjustedIndex = type.isWithShipping ? 2 : 1; for (int i = 1, j = size; i < j; i += loopIncrement) { int c = v[i]; int date = v[i - 1]; if (date >= end) break; if (c != -1) { if (date >= start) { if (i == 1) { if (i + adjustedIndex == j) { return 0; } } int nextDate; if (i + adjustedIndex == j) { nextDate = now; } else { nextDate = v[i + adjustedIndex]; if (nextDate > end) nextDate = end; } long tmpCount = nextDate - date; count += tmpCount; } else { if (i == j - adjustedIndex) { return 0; } else { int nextDate = v[i + adjustedIndex]; if (nextDate >= end) return 0; if (nextDate >= start) count = nextDate - start; } } } } if (count > 0) count = 100 - (int) Math.floor((count * 100) / (end - start)); else if (count == 0) { count = 100; } return (int) count; } }
added missing java doc
src/main/java/com/keepa/api/backend/helper/ProductAnalyzer.java
added missing java doc
<ide><path>rc/main/java/com/keepa/api/backend/helper/ProductAnalyzer.java <ide> * Returns a weighted mean of the products csv history in the last X days <ide> * <ide> * @param csv value/price history csv <add> * @param now current keepa time minutes <ide> * @param days number of days the weighted mean will be calculated for (e.g. 90 days, 60 days, 30 days) <add> * @param type the type of the csv data. <ide> * @return the weighted mean or -1 if insufficient history csv length (less than a day). If the csv includes shipping costs it will be the wieghted mean of the landing price (price + shipping). <ide> */ <ide> public static int calcWeightedMean(int[] csv, int now, double days, CsvType type) { <ide> return false; <ide> } <ide> <del> public static int getOutOfStockPercentageInInterval(int[] v, int now, int start, int end, CsvType type, int trackingSince) { <add> /** <add> * Returns a the percentage of time in the given interval the price type was out of stock <add> * <add> * @param csv value/price history csv <add> * @param now current keepa time minutes <add> * @param start start of the interval (keepa time minutes), can be 0. <add> * @param end end of the interval (keepa time minutes), can be in the future (Integer.MAX_VALUE). <add> * @param type the type of the csv data. <add> * @param trackingSince the product object's trackingSince value <add> * @return percentage between 0 and 100 or -1 if insufficient data. 100 = 100% out of stock in the interval. <add> */ <add> public static int getOutOfStockPercentageInInterval(int[] csv, int now, int start, int end, CsvType type, int trackingSince) { <ide> if (!type.isPrice) return -1; <ide> if (start >= end) return -1; <del> if (v == null || v.length == 0) <add> if (csv == null || csv.length == 0) <ide> return -1; <ide> <del> int size = v.length; <add> int size = csv.length; <ide> int loopIncrement = (type.isWithShipping ? 3 : 2); <ide> <del> int lastTime = getLastTime(v, type); <del> int firstTime = v[0]; <add> int lastTime = getLastTime(csv, type); <add> int firstTime = csv[0]; <ide> <ide> if (lastTime == -1 || firstTime == -1 || firstTime > end || trackingSince > end) return -1; <ide> <ide> int adjustedIndex = type.isWithShipping ? 2 : 1; <ide> <ide> for (int i = 1, j = size; i < j; i += loopIncrement) { <del> int c = v[i]; <del> int date = v[i - 1]; <add> int c = csv[i]; <add> int date = csv[i - 1]; <ide> <ide> if (date >= end) <ide> break; <ide> if (i + adjustedIndex == j) { <ide> nextDate = now; <ide> } else { <del> nextDate = v[i + adjustedIndex]; <add> nextDate = csv[i + adjustedIndex]; <ide> if (nextDate > end) <ide> nextDate = end; <ide> } <ide> if (i == j - adjustedIndex) { <ide> return 0; <ide> } else { <del> int nextDate = v[i + adjustedIndex]; <add> int nextDate = csv[i + adjustedIndex]; <ide> <ide> if (nextDate >= end) <ide> return 0;
JavaScript
mit
b01a5f3acc3ae6a83449d2393d2f5580259ba312
0
filamentgroup/auto-complete,filamentgroup/component,filamentgroup/auto-complete,filamentgroup/auto-complete
/*
 * simple auto-enhance-able component skeleton
 * Copyright (c) 2013 Filament Group, Inc.
 * Licensed under MIT
 */

(function( $ ){

    var componentName = "component-name-here",
        enhancedAttr = "data-enhanced",
        initSelector = "." + componentName + ":not([" + enhancedAttr + "])";

    $.fn[ componentName ] = function(){
        return this.each( function(){
            // make enhancements here
        });
    };

    // auto-init on enhance (which is called on domready)
    $( document ).bind( "enhance", function( e ){
        var $sel = $( e.target ).is( initSelector ) ? $( e.target ) : $( initSelector, e.target );
        $sel[ componentName ]().attr( enhancedAttr, "true" );
    });

}( jQuery ));
component.js
/*
 * simple auto-enhance-able component skeleton
 * Copyright (c) 2013 Filament Group, Inc.
 * Licensed under MIT
 */

(function( $ ){

    var componentName = "component-name-here",
        enhancedAttr = "data-enhanced",
        initSelector = "." + componentName + ":not([" + enhancedAttr + "])";

    $.fn[ componentName ] = function(){
        return this.each( function(){
            // make enhancements here
        });
    };

    // auto-init on enhance (which is called on domready)
    $( document ).bind( "enhance", function( e ){
        $( initSelector, e.target )[ componentName ]().attr( enhancedAttr, true );
    });

}( jQuery ));
make sure children are selected as well
component.js
make sure children are selected as well
<ide><path>omponent.js
<ide>
<ide>     // auto-init on enhance (which is called on domready)
<ide>     $( document ).bind( "enhance", function( e ){
<del>         $( initSelector, e.target )[ componentName ]().attr( enhancedAttr, true );
<add>         var $sel = $( e.target ).is( initSelector ) ? $( e.target ) : $( initSelector, e.target );
<add>         $sel[ componentName ]().attr( enhancedAttr, "true" );
<ide>     });
<ide>
<ide> }( jQuery ));
Java
apache-2.0
53b631670500f9b138fa6294751441b7d2310f0c
0
joansmith/dmix,jcnoir/dmix,0359xiaodong/dmix,abarisain/dmix,hurzl/dmix,0359xiaodong/dmix,abarisain/dmix,jcnoir/dmix,hurzl/dmix,joansmith/dmix
/* * Copyright (C) 2010-2014 The MPDroid Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.namelessdev.mpdroid; import org.a0z.mpd.MPDStatus; import org.a0z.mpd.event.StatusChangeListener; import org.a0z.mpd.exception.MPDServerException; import android.app.Service; import android.content.Context; import android.content.Intent; import android.media.AudioManager; import android.media.AudioManager.OnAudioFocusChangeListener; import android.media.MediaPlayer; import android.media.MediaPlayer.OnCompletionListener; import android.media.MediaPlayer.OnErrorListener; import android.media.MediaPlayer.OnPreparedListener; import android.os.Handler; import android.os.IBinder; import android.os.Message; import android.os.PowerManager; import android.os.StrictMode; import android.telephony.PhoneStateListener; import android.telephony.TelephonyManager; import android.util.Log; import java.io.IOException; /** * StreamingService hooks Android's audio framework to the * user's MPD streaming server to allow local audio playback. * * @author Arnaud Barisain Monrose (Dream_Team) * @version $Id: $ */ final public class StreamingService extends Service implements /** * OnInfoListener is not used because it is broken (never gets called, ever).. * OnBufferingUpdateListener is not used because it depends on a stream completion time. */ OnAudioFocusChangeListener, OnCompletionListener, OnErrorListener, OnPreparedListener, StatusChangeListener { private static final String TAG = "StreamingService"; private static final String FULLY_QUALIFIED_NAME = "com.namelessdev.mpdroid." + TAG + "."; /** Kills (or hides) the notification if StreamingService started it. */ public static final String ACTION_NOTIFICATION_STOP = FULLY_QUALIFIED_NAME + "NOTIFICATION_STOP"; public static final String ACTION_START = FULLY_QUALIFIED_NAME + "START_STREAMING"; /** Keeps the notification alive, but puts it in non-streaming status. */ public static final String ACTION_STREAMING_STOP = FULLY_QUALIFIED_NAME + "STOP_STREAMING"; public static final String ACTION_BUFFERING_BEGIN = FULLY_QUALIFIED_NAME + "BUFFERING_BEGIN"; public static final String ACTION_BUFFERING_END = FULLY_QUALIFIED_NAME + "BUFFERING_END"; private static boolean serviceWoundDown = false; final private Handler delayedStopHandler = new Handler() { @Override public void handleMessage(Message msg) { Log.d(TAG, "Stopping self by handler delay."); stopSelf(); } }; final private Handler delayedPlayHandler = new Handler() { @Override public void handleMessage(Message msg) { mediaPlayer.prepareAsync(); } }; private boolean serviceControlHandlersActive = false; private TelephonyManager mTelephonyManager = null; private MPDApplication app = null; private MediaPlayer mediaPlayer = null; private AudioManager audioManager = null; private boolean streamingStoppedForCall = false; private PowerManager.WakeLock mWakeLock = null; /** Is MPD playing? 
*/ private boolean isPlaying = false; public static boolean isWoundDown() { return serviceWoundDown; } private static void serviceWoundDown(boolean value) { serviceWoundDown = value; } /** * Setup for the method which allows MPDroid to override behavior during * phone events. */ final private PhoneStateListener phoneStateListener = new PhoneStateListener() { @Override public void onCallStateChanged(int state, String incomingNumber) { switch (state) { case TelephonyManager.CALL_STATE_RINGING: final int ringVolume = audioManager.getStreamVolume(AudioManager.STREAM_RING); if (ringVolume == 0) { break; } /** Otherwise, continue */ case TelephonyManager.CALL_STATE_OFFHOOK: if (isPlaying) { streamingStoppedForCall = true; windDownResources(ACTION_STREAMING_STOP); } break; case TelephonyManager.CALL_STATE_IDLE: // Resume playback only if music was playing when the call was answered if (streamingStoppedForCall) { tryToStream(); streamingStoppedForCall = false; } break; } } }; /** Keep track of the number of errors encountered. */ private int errorIterator = 0; /** Keep track when mediaPlayer is preparing a stream */ private boolean preparingStreaming = false; /** * getState is a convenience method to safely retrieve a state object. * * @return A current state object. */ private String getState() { Log.d(TAG, "getState()"); String state = null; try { state = app.oMPDAsyncHelper.oMPD.getStatus().getState(); } catch (MPDServerException e) { Log.w(TAG, "Failed to get the current MPD state.", e); } return state; } /** * If streaming mode is activated this will setup the Android mediaPlayer * framework, register the media button events, register the remote control * client then setup and the framework streaming. */ private void tryToStream() { if (preparingStreaming) { Log.d(TAG, "A stream is already being prepared."); } else if (!isPlaying) { Log.d(TAG, "MPD is not currently playing, can't stream."); } else if (!app.getApplicationState().streamingMode) { Log.d(TAG, "streamingMode is not currently active, won't stream."); } else { beginStreaming(); } } private void beginStreaming() { Log.d(TAG, "StreamingService.beginStreaming()"); if (mediaPlayer == null) { windUpResources(); } final String streamSource = getStreamSource(); final int ASYNC_IDLE = 1500; preparingStreaming = true; stopControlHandlers(); sendIntent(ACTION_BUFFERING_BEGIN, NotificationService.class); /** * With MediaPlayer, there is a racy bug which affects, minimally, Android KitKat and lower. * If mediaPlayer.prepareAsync() is called too soon after mediaPlayer.setDataSource(), and * after the initial mediaPlayer.play(), general and non-specific errors are usually emitted * for the first few 100 milliseconds. * * Sometimes, these errors result in nagging Log errors, sometimes these errors result in * unrecoverable errors. This handler sets up a 1.5 second delay between * mediaPlayer.setDataSource() and mediaPlayer.AsyncPrepare() whether first play after * service start or not. * * The magic number here can be adjusted if there are any more problems. I have witnessed * these errors occur at 750ms, but never higher. It's worth doubling, even in optimal * conditions, stream buffering is pretty slow anyhow. Adjust if necessary. * * This order is very specific and if interrupted can cause big problems. 
*/ try { mediaPlayer.reset(); mediaPlayer.setAudioStreamType(AudioManager.STREAM_MUSIC); mediaPlayer.setDataSource(streamSource); Message msg = delayedPlayHandler.obtainMessage(); delayedPlayHandler.sendMessageDelayed(msg, ASYNC_IDLE); /** Go to onPrepared() */ } catch (IOException e) { Log.e(TAG, "IO failure while trying to stream from: " + streamSource, e); windDownResources(ACTION_STREAMING_STOP); } catch (IllegalStateException e) { Log.e(TAG, "This is typically caused by a change in the server state during stream preparation.", e); windDownResources(ACTION_STREAMING_STOP); } finally { delayedPlayHandler.removeCallbacksAndMessages(delayedPlayHandler); } } @Override public void connectionStateChanged(boolean connected, boolean connectionLost) { } /** A method to send a quick message to another class. */ private void sendIntent(String msg, Class destination) { Log.d(TAG, "Sending intent " + msg + " to " + destination + "."); Intent i = new Intent(this, destination); i.setAction(msg); this.startService(i); } /** * A JMPDComm callback to be invoked during library state changes. * * @param updating true when updating, false when not updating. */ @Override public void libraryStateChanged(boolean updating) { } /** * Handle the change of volume if a notification, or any other kind of * interrupting audio event. * * @param focusChange The type of focus change. */ @Override final public void onAudioFocusChange(int focusChange) { Log.d(TAG, "StreamingService.onAudioFocusChange()"); if (focusChange == AudioManager.AUDIOFOCUS_LOSS_TRANSIENT) { mediaPlayer.setVolume(0.2f, 0.2f); } else if (focusChange == AudioManager.AUDIOFOCUS_GAIN) { mediaPlayer.setVolume(1f, 1f); } else if (focusChange == AudioManager.AUDIOFOCUS_LOSS) { sendIntent(NotificationService.ACTION_PAUSE, NotificationService.class); } } @Override final public IBinder onBind(Intent intent) { return null; } /** * A MediaPlayer callback to be invoked when playback of a media source has completed. * * @param mp The MediaPlayer object that reached the end of the stream. */ @Override final public void onCompletion(MediaPlayer mp) { Log.d(TAG, "StreamingService.onCompletion()"); /** * If MPD is restarted during streaming, onCompletion() will be called. * onStateChange() won't be called. If we still detect playing, restart the stream. */ if (isPlaying) { tryToStream(); } else { /** The only way we make it here is with an empty playlist. */ windDownResources(ACTION_NOTIFICATION_STOP); } } final public void onCreate() { Log.d(TAG, "StreamingService.onCreate()"); app = (MPDApplication) getApplication(); if (app == null || !app.getApplicationState().streamingMode) { stopSelf(); } audioManager = (AudioManager) getSystemService(Context.AUDIO_SERVICE); StrictMode.ThreadPolicy policy = new StrictMode.ThreadPolicy.Builder().permitAll().build(); StrictMode.setThreadPolicy(policy); app.oMPDAsyncHelper.addStatusChangeListener(this); app.addConnectionLock(this); isPlaying = MPDStatus.MPD_STATE_PLAYING.equals(getState()); } private String getStreamSource() { return "http://" + app.oMPDAsyncHelper.getConnectionSettings().getConnectionStreamingServer() + ":" + app.oMPDAsyncHelper.getConnectionSettings().iPortStreaming + "/" + app.oMPDAsyncHelper.getConnectionSettings().sSuffixStreaming; } /** * This happens at the beginning of beginStreaming() to populate all * necessary resources for handling the MediaPlayer stream. 
*/ private void windUpResources() { Log.d(TAG, "Winding up resources."); serviceWoundDown(false); if (mWakeLock == null) { final PowerManager pm = (PowerManager) getSystemService(Context.POWER_SERVICE); mWakeLock = pm.newWakeLock(PowerManager.PARTIAL_WAKE_LOCK, TAG); mWakeLock.setReferenceCounted(false); } mWakeLock.acquire(); mTelephonyManager = (TelephonyManager) getSystemService(Context.TELEPHONY_SERVICE); mTelephonyManager.listen(phoneStateListener, PhoneStateListener.LISTEN_CALL_STATE); mediaPlayer = new MediaPlayer(); mediaPlayer.setOnCompletionListener(this); mediaPlayer.setOnPreparedListener(this); mediaPlayer.setOnErrorListener(this); } /** * windDownResources occurs after a delay or during stopSelf() to * clean up resources and give up focus to the phone and sound. */ private void windDownResources(String action) { Log.d(TAG, "Winding down resources."); serviceWoundDown(true); if (ACTION_STREAMING_STOP.equals(action)) { setupServiceControlHandlers(); } if (action != null) { sendIntent(action, NotificationService.class); } /** * Make sure that the first thing we do is releasing the wake lock */ if (mWakeLock != null) { mWakeLock.release(); } if (mTelephonyManager != null) { mTelephonyManager.listen(phoneStateListener, PhoneStateListener.LISTEN_NONE); } if (audioManager != null) { audioManager.abandonAudioFocus(this); } if (mediaPlayer != null) { if (mediaPlayer.isPlaying()) { mediaPlayer.stop(); } mediaPlayer.reset(); mediaPlayer.release(); mediaPlayer = null; } /** * If we got here due to an exception, try to stream * again until the error iterator runs out. */ if (preparingStreaming) { Log.d(TAG, "Stream had an error, trying to re-initiate streaming, try: " + errorIterator); errorIterator += 1; preparingStreaming = false; tryToStream(); } } @Override final public void onDestroy() { Log.d(TAG, "StreamingService.onDestroy()"); stopControlHandlers(); /** Remove the current MPD listeners */ app.oMPDAsyncHelper.removeStatusChangeListener(this); windDownResources(ACTION_NOTIFICATION_STOP); app.removeConnectionLock(this); app.getApplicationState().streamingMode = false; } /** * A MediaPlayer callback to be invoked when there has been an error during an asynchronous * operation (other errors will throw exceptions at method call time). * * @param mp The current mediaPlayer. * @param what The type of error that has occurred. * @param extra An extra code, specific to the error. Typically implementation dependent. * @return True if the method handled the error, false if it didn't. Returning false, or not * having an OnErrorListener at all, will cause the OnCompletionListener to be called. */ @Override final public boolean onError(MediaPlayer mp, int what, int extra) { Log.d(TAG, "StreamingService.onError()"); final int MAX_ERROR = 4; if (errorIterator > 0) { Log.d(TAG, "Error occurred while streaming, this is try #" + errorIterator + ", will attempt up to " + MAX_ERROR + " times."); } /** This keeps from continuous errors and battery draining. */ if (errorIterator > MAX_ERROR) { stopSelf(); } /** beginStreaming() will never start otherwise. */ preparingStreaming = false; /** Either way we need to stop streaming. */ windDownResources(ACTION_STREAMING_STOP); errorIterator += 1; return true; } /** * A MediaPlayer callback used when the media file is ready for playback. * * @param mp The MediaPlayer that is ready for playback. 
*/ @Override final public void onPrepared(MediaPlayer mp) { Log.d(TAG, "StreamingService.onPrepared()"); final int focusResult = audioManager.requestAudioFocus(this, AudioManager.STREAM_MUSIC, AudioManager.AUDIOFOCUS_GAIN); /** * Not to be playing here is unlikely but it's a race we need to avoid. */ if (isPlaying && focusResult == AudioManager.AUDIOFOCUS_REQUEST_GRANTED) { sendIntent(ACTION_BUFFERING_END, NotificationService.class); mediaPlayer.start(); } else { /** Because preparingStreaming is still set, this will reset the stream. */ windDownResources(ACTION_STREAMING_STOP); } preparingStreaming = false; errorIterator = 0; /** Reset the error iterator. */ } /** * Called by the system every time a client explicitly * starts the service by calling startService(Intent). */ @Override final public int onStartCommand(Intent intent, int flags, int startId) { Log.d(TAG, "StreamingService.onStartCommand()"); if (!app.getApplicationState().streamingMode) { stopSelf(); } switch (intent.getAction()) { case ACTION_START: tryToStream(); break; case ACTION_STREAMING_STOP: windDownResources(ACTION_STREAMING_STOP); break; } /** * We want this service to continue running until it is explicitly * stopped, so return sticky. */ return START_STICKY; } @Override public void playlistChanged(MPDStatus mpdStatus, int oldPlaylistVersion) { } @Override public void randomChanged(boolean random) { } @Override public void repeatChanged(boolean repeating) { } /** * A JMPDComm callback which is invoked on MPD status change. * * @param mpdStatus MPDStatus after event. * @param oldState Previous state. */ @Override final public void stateChanged(MPDStatus mpdStatus, String oldState) { Log.d(TAG, "StreamingService.stateChanged()"); final String state = mpdStatus.getState(); if (state != null) { switch (state) { case MPDStatus.MPD_STATE_PLAYING: stopControlHandlers(); isPlaying = true; tryToStream(); break; case MPDStatus.MPD_STATE_STOPPED: case MPDStatus.MPD_STATE_PAUSED: /** * If in the middle of stream preparation, "Buffering…" notification message * is likely. */ if (preparingStreaming) { sendIntent(ACTION_BUFFERING_END, NotificationService.class); } /** If the playlistLength is == 0, let onCompletion handle it. */ if (mpdStatus.getPlaylistLength() != 0) { windDownResources(ACTION_STREAMING_STOP); } isPlaying = false; break; } } } private void stopControlHandlers() { if (serviceControlHandlersActive) { Log.d(TAG, "Removing control handlers"); delayedStopHandler.removeCallbacksAndMessages(null); serviceControlHandlersActive = false; } } private void setupServiceControlHandlers() { if (!serviceControlHandlersActive) { Log.d(TAG, "Setting up control handlers"); final int STOP_IDLE_DELAY = 600000; /** 10 minutes */ /** * Stop handler so we don't annoy the user when they forget to turn streamingMode off. */ final Message msg = delayedStopHandler.obtainMessage(); delayedStopHandler.sendMessageDelayed(msg, STOP_IDLE_DELAY); serviceControlHandlersActive = true; } } @Override public void trackChanged(MPDStatus mpdStatus, int oldTrack) { } @Override public void volumeChanged(MPDStatus mpdStatus, int oldVolume) { } }
MPDroid/src/com/namelessdev/mpdroid/StreamingService.java
/* * Copyright (C) 2010-2014 The MPDroid Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.namelessdev.mpdroid; import org.a0z.mpd.MPDStatus; import org.a0z.mpd.event.StatusChangeListener; import org.a0z.mpd.exception.MPDServerException; import android.app.Service; import android.content.Context; import android.content.Intent; import android.media.AudioManager; import android.media.AudioManager.OnAudioFocusChangeListener; import android.media.MediaPlayer; import android.media.MediaPlayer.OnCompletionListener; import android.media.MediaPlayer.OnErrorListener; import android.media.MediaPlayer.OnPreparedListener; import android.os.Handler; import android.os.IBinder; import android.os.Message; import android.os.PowerManager; import android.os.StrictMode; import android.telephony.PhoneStateListener; import android.telephony.TelephonyManager; import android.util.Log; import java.io.IOException; /** * StreamingService hooks Android's audio framework to the * user's MPD streaming server to allow local audio playback. * * @author Arnaud Barisain Monrose (Dream_Team) * @version $Id: $ */ final public class StreamingService extends Service implements /** * OnInfoListener is not used because it is broken (never gets called, ever).. * OnBufferingUpdateListener is not used because it depends on a stream completion time. */ OnAudioFocusChangeListener, OnCompletionListener, OnErrorListener, OnPreparedListener, StatusChangeListener { private static final String TAG = "StreamingService"; private static final String FULLY_QUALIFIED_NAME = "com.namelessdev.mpdroid." + TAG + "."; /** Kills (or hides) the notification if StreamingService started it. */ public static final String ACTION_NOTIFICATION_STOP = FULLY_QUALIFIED_NAME + "NOTIFICATION_STOP"; public static final String ACTION_START = FULLY_QUALIFIED_NAME + "START_STREAMING"; /** Keeps the notification alive, but puts it in non-streaming status. */ public static final String ACTION_STREAMING_STOP = FULLY_QUALIFIED_NAME + "STOP_STREAMING"; public static final String ACTION_BUFFERING_BEGIN = FULLY_QUALIFIED_NAME + "BUFFERING_BEGIN"; public static final String ACTION_BUFFERING_END = FULLY_QUALIFIED_NAME + "BUFFERING_END"; private static boolean serviceWoundDown = false; final private Handler delayedStopHandler = new Handler() { @Override public void handleMessage(Message msg) { Log.d(TAG, "Stopping self by handler delay."); stopSelf(); } }; final private Handler delayedPlayHandler = new Handler() { @Override public void handleMessage(Message msg) { mediaPlayer.prepareAsync(); } }; private boolean serviceControlHandlersActive = false; private TelephonyManager mTelephonyManager = null; private MPDApplication app = null; private MediaPlayer mediaPlayer = null; private AudioManager audioManager = null; private boolean streamingStoppedForCall = false; private PowerManager.WakeLock mWakeLock = null; /** Is MPD playing? 
*/ private boolean isPlaying = false; public static boolean isWoundDown() { return serviceWoundDown; } private static void serviceWoundDown(boolean value) { serviceWoundDown = value; } /** * Setup for the method which allows MPDroid to override behavior during * phone events. */ final private PhoneStateListener phoneStateListener = new PhoneStateListener() { @Override public void onCallStateChanged(int state, String incomingNumber) { switch (state) { case TelephonyManager.CALL_STATE_RINGING: final int ringVolume = audioManager.getStreamVolume(AudioManager.STREAM_RING); if (ringVolume == 0) { break; } /** Otherwise, continue */ case TelephonyManager.CALL_STATE_OFFHOOK: if (isPlaying) { streamingStoppedForCall = true; windDownResources(ACTION_STREAMING_STOP); } break; case TelephonyManager.CALL_STATE_IDLE: // Resume playback only if music was playing when the call was answered if (streamingStoppedForCall) { tryToStream(); streamingStoppedForCall = false; } break; } } }; /** Keep track of the number of errors encountered. */ private int errorIterator = 0; /** Keep track when mediaPlayer is preparing a stream */ private boolean preparingStreaming = false; /** * getState is a convenience method to safely retrieve a state object. * * @return A current state object. */ private String getState() { Log.d(TAG, "getState()"); String state = null; try { state = app.oMPDAsyncHelper.oMPD.getStatus().getState(); } catch (MPDServerException e) { Log.w(TAG, "Failed to get the current MPD state.", e); } return state; } /** * If streaming mode is activated this will setup the Android mediaPlayer * framework, register the media button events, register the remote control * client then setup and the framework streaming. */ private void tryToStream() { if (preparingStreaming) { Log.d(TAG, "A stream is already being prepared."); } else if (!isPlaying) { Log.d(TAG, "MPD is not currently playing, can't stream."); } else if (!app.getApplicationState().streamingMode) { Log.d(TAG, "streamingMode is not currently active, won't stream."); } else { beginStreaming(); } } private void beginStreaming() { Log.d(TAG, "StreamingService.beginStreaming()"); if (mediaPlayer == null) { windUpResources(); } final String streamSource = getStreamSource(); final int ASYNC_IDLE = 1500; preparingStreaming = true; stopControlHandlers(); sendIntent(ACTION_BUFFERING_BEGIN, NotificationService.class); /** * With MediaPlayer, there is a racy bug which affects, minimally, Android KitKat and lower. * If mediaPlayer.prepareAsync() is called too soon after mediaPlayer.setDataSource(), and * after the initial mediaPlayer.play(), general and non-specific errors are usually emitted * for the first few 100 milliseconds. * * Sometimes, these errors result in nagging Log errors, sometimes these errors result in * unrecoverable errors. This handler sets up a 1.5 second delay between * mediaPlayer.setDataSource() and mediaPlayer.AsyncPrepare() whether first play after * service start or not. * * The magic number here can be adjusted if there are any more problems. I have witnessed * these errors occur at 750ms, but never higher. It's worth doubling, even in optimal * conditions, stream buffering is pretty slow anyhow. Adjust if necessary. * * This order is very specific and if interrupted can cause big problems. 
*/ try { mediaPlayer.reset(); mediaPlayer.setAudioStreamType(AudioManager.STREAM_MUSIC); mediaPlayer.setDataSource(streamSource); Message msg = delayedPlayHandler.obtainMessage(); delayedPlayHandler.sendMessageDelayed(msg, ASYNC_IDLE); /** Go to onPrepared() */ } catch (IOException e) { Log.e(TAG, "IO failure while trying to stream from: " + streamSource, e); windDownResources(ACTION_STREAMING_STOP); } catch (IllegalStateException e) { Log.e(TAG, "This is typically caused by a change in the server state during stream preparation.", e); windDownResources(ACTION_STREAMING_STOP); } finally { delayedPlayHandler.removeCallbacksAndMessages(delayedPlayHandler); } } @Override public void connectionStateChanged(boolean connected, boolean connectionLost) { } /** A method to send a quick message to another class. */ private void sendIntent(String msg, Class destination) { Log.d(TAG, "Sending intent " + msg + " to " + destination + "."); Intent i = new Intent(this, destination); i.setAction(msg); this.startService(i); } /** * A JMPDComm callback to be invoked during library state changes. * * @param updating true when updating, false when not updating. */ @Override public void libraryStateChanged(boolean updating) { } /** * Handle the change of volume if a notification, or any other kind of * interrupting audio event. * * @param focusChange The type of focus change. */ @Override final public void onAudioFocusChange(int focusChange) { Log.d(TAG, "StreamingService.onAudioFocusChange()"); if (focusChange == AudioManager.AUDIOFOCUS_LOSS_TRANSIENT) { mediaPlayer.setVolume(0.2f, 0.2f); } else if (focusChange == AudioManager.AUDIOFOCUS_GAIN) { mediaPlayer.setVolume(1f, 1f); } else if (focusChange == AudioManager.AUDIOFOCUS_LOSS) { sendIntent(NotificationService.ACTION_PAUSE, NotificationService.class); } } @Override final public IBinder onBind(Intent intent) { return null; } /** * A MediaPlayer callback to be invoked when playback of a media source has completed. * * @param mp The MediaPlayer object that reached the end of the stream. */ @Override final public void onCompletion(MediaPlayer mp) { Log.d(TAG, "StreamingService.onCompletion()"); /** * If MPD is restarted during streaming, onCompletion() will be called. * onStateChange() won't be called. If we still detect playing, restart the stream. */ if (isPlaying) { tryToStream(); } else { /** The only way we make it here is with an empty playlist. */ windDownResources(ACTION_NOTIFICATION_STOP); } } final public void onCreate() { Log.d(TAG, "StreamingService.onCreate()"); app = (MPDApplication) getApplication(); if (app == null || !app.getApplicationState().streamingMode) { stopSelf(); } audioManager = (AudioManager) getSystemService(Context.AUDIO_SERVICE); StrictMode.ThreadPolicy policy = new StrictMode.ThreadPolicy.Builder().permitAll().build(); StrictMode.setThreadPolicy(policy); app.oMPDAsyncHelper.addStatusChangeListener(this); app.addConnectionLock(this); isPlaying = MPDStatus.MPD_STATE_PLAYING.equals(getState()); } private String getStreamSource() { return "http://" + app.oMPDAsyncHelper.getConnectionSettings().getConnectionStreamingServer() + ":" + app.oMPDAsyncHelper.getConnectionSettings().iPortStreaming + "/" + app.oMPDAsyncHelper.getConnectionSettings().sSuffixStreaming; } /** * This happens at the beginning of beginStreaming() to populate all * necessary resources for handling the MediaPlayer stream. 
*/ private void windUpResources() { Log.d(TAG, "Winding up resources."); serviceWoundDown(false); if (mWakeLock == null) { final PowerManager pm = (PowerManager) getSystemService(Context.POWER_SERVICE); mWakeLock = pm.newWakeLock(PowerManager.PARTIAL_WAKE_LOCK, TAG); mWakeLock.setReferenceCounted(false); } mWakeLock.acquire(); mTelephonyManager = (TelephonyManager) getSystemService(Context.TELEPHONY_SERVICE); mTelephonyManager.listen(phoneStateListener, PhoneStateListener.LISTEN_CALL_STATE); mediaPlayer = new MediaPlayer(); mediaPlayer.setOnCompletionListener(this); mediaPlayer.setOnPreparedListener(this); mediaPlayer.setOnErrorListener(this); } /** * windDownResources occurs after a delay or during stopSelf() to * clean up resources and give up focus to the phone and sound. */ private void windDownResources(String action) { Log.d(TAG, "Winding down resources."); serviceWoundDown(true); if (ACTION_STREAMING_STOP.equals(action)) { setupServiceControlHandlers(); } if (action != null) { sendIntent(action, NotificationService.class); } /** * Make sure that the first thing we do is releasing the wake lock */ if (mWakeLock != null) { mWakeLock.release(); } if (mTelephonyManager != null) { mTelephonyManager.listen(phoneStateListener, PhoneStateListener.LISTEN_NONE); } if (mediaPlayer != null) { if (mediaPlayer.isPlaying()) { mediaPlayer.stop(); } mediaPlayer.reset(); mediaPlayer.release(); mediaPlayer = null; } /** * If we got here due to an exception, try to stream * again until the error iterator runs out. */ if (preparingStreaming) { Log.d(TAG, "Stream had an error, trying to re-initiate streaming, try: " + errorIterator); errorIterator += 1; preparingStreaming = false; tryToStream(); } } @Override final public void onDestroy() { Log.d(TAG, "StreamingService.onDestroy()"); stopControlHandlers(); if (audioManager != null) { audioManager.abandonAudioFocus(this); } /** Remove the current MPD listeners */ app.oMPDAsyncHelper.removeStatusChangeListener(this); windDownResources(ACTION_NOTIFICATION_STOP); app.removeConnectionLock(this); app.getApplicationState().streamingMode = false; } /** * A MediaPlayer callback to be invoked when there has been an error during an asynchronous * operation (other errors will throw exceptions at method call time). * * @param mp The current mediaPlayer. * @param what The type of error that has occurred. * @param extra An extra code, specific to the error. Typically implementation dependent. * @return True if the method handled the error, false if it didn't. Returning false, or not * having an OnErrorListener at all, will cause the OnCompletionListener to be called. */ @Override final public boolean onError(MediaPlayer mp, int what, int extra) { Log.d(TAG, "StreamingService.onError()"); final int MAX_ERROR = 4; if (errorIterator > 0) { Log.d(TAG, "Error occurred while streaming, this is try #" + errorIterator + ", will attempt up to " + MAX_ERROR + " times."); } /** This keeps from continuous errors and battery draining. */ if (errorIterator > MAX_ERROR) { stopSelf(); } /** beginStreaming() will never start otherwise. */ preparingStreaming = false; /** Either way we need to stop streaming. */ windDownResources(ACTION_STREAMING_STOP); errorIterator += 1; return true; } /** * A MediaPlayer callback used when the media file is ready for playback. * * @param mp The MediaPlayer that is ready for playback. 
*/ @Override final public void onPrepared(MediaPlayer mp) { Log.d(TAG, "StreamingService.onPrepared()"); final int focusResult = audioManager.requestAudioFocus(this, AudioManager.STREAM_MUSIC, AudioManager.AUDIOFOCUS_GAIN); /** * Not to be playing here is unlikely but it's a race we need to avoid. */ if (isPlaying && focusResult == AudioManager.AUDIOFOCUS_REQUEST_GRANTED) { sendIntent(ACTION_BUFFERING_END, NotificationService.class); mediaPlayer.start(); } else { /** Because preparingStreaming is still set, this will reset the stream. */ windDownResources(ACTION_STREAMING_STOP); } preparingStreaming = false; errorIterator = 0; /** Reset the error iterator. */ } /** * Called by the system every time a client explicitly * starts the service by calling startService(Intent). */ @Override final public int onStartCommand(Intent intent, int flags, int startId) { Log.d(TAG, "StreamingService.onStartCommand()"); if (!app.getApplicationState().streamingMode) { stopSelf(); } switch (intent.getAction()) { case ACTION_START: tryToStream(); break; case ACTION_STREAMING_STOP: windDownResources(ACTION_STREAMING_STOP); break; } /** * We want this service to continue running until it is explicitly * stopped, so return sticky. */ return START_STICKY; } @Override public void playlistChanged(MPDStatus mpdStatus, int oldPlaylistVersion) { } @Override public void randomChanged(boolean random) { } @Override public void repeatChanged(boolean repeating) { } /** * A JMPDComm callback which is invoked on MPD status change. * * @param mpdStatus MPDStatus after event. * @param oldState Previous state. */ @Override final public void stateChanged(MPDStatus mpdStatus, String oldState) { Log.d(TAG, "StreamingService.stateChanged()"); final String state = mpdStatus.getState(); if (state != null) { switch (state) { case MPDStatus.MPD_STATE_PLAYING: stopControlHandlers(); isPlaying = true; tryToStream(); break; case MPDStatus.MPD_STATE_STOPPED: case MPDStatus.MPD_STATE_PAUSED: /** * If in the middle of stream preparation, "Buffering…" notification message * is likely. */ if (preparingStreaming) { sendIntent(ACTION_BUFFERING_END, NotificationService.class); } /** If the playlistLength is == 0, let onCompletion handle it. */ if (mpdStatus.getPlaylistLength() != 0) { windDownResources(ACTION_STREAMING_STOP); } isPlaying = false; break; } } } private void stopControlHandlers() { if (serviceControlHandlersActive) { Log.d(TAG, "Removing control handlers"); delayedStopHandler.removeCallbacksAndMessages(null); serviceControlHandlersActive = false; } } private void setupServiceControlHandlers() { if (!serviceControlHandlersActive) { Log.d(TAG, "Setting up control handlers"); final int STOP_IDLE_DELAY = 600000; /** 10 minutes */ /** * Stop handler so we don't annoy the user when they forget to turn streamingMode off. */ final Message msg = delayedStopHandler.obtainMessage(); delayedStopHandler.sendMessageDelayed(msg, STOP_IDLE_DELAY); serviceControlHandlersActive = true; } } @Override public void trackChanged(MPDStatus mpdStatus, int oldTrack) { } @Override public void volumeChanged(MPDStatus mpdStatus, int oldVolume) { } }
StreamingService: Abandon audio focus when winding down resources.
MPDroid/src/com/namelessdev/mpdroid/StreamingService.java
StreamingService: Abandon audio focus when winding down resources.
<ide><path>PDroid/src/com/namelessdev/mpdroid/StreamingService.java <ide> mTelephonyManager.listen(phoneStateListener, PhoneStateListener.LISTEN_NONE); <ide> } <ide> <add> if (audioManager != null) { <add> audioManager.abandonAudioFocus(this); <add> } <add> <ide> if (mediaPlayer != null) { <ide> if (mediaPlayer.isPlaying()) { <ide> mediaPlayer.stop(); <ide> Log.d(TAG, "StreamingService.onDestroy()"); <ide> <ide> stopControlHandlers(); <del> <del> if (audioManager != null) { <del> audioManager.abandonAudioFocus(this); <del> } <ide> <ide> /** Remove the current MPD listeners */ <ide> app.oMPDAsyncHelper.removeStatusChangeListener(this);
Java
mit
4b5bc7517a4501fd13d4dd70b766804b2d4fd07b
0
Armandorev/url-shortener,Armandorev/url-shortener,Armandorev/url-shortener,Armandorev/url-shortener
package benjamin.groehbiel.ch.shortener.redis; import benjamin.groehbiel.ch.JsonHelper; import benjamin.groehbiel.ch.shortener.ShortenerHandle; import com.fasterxml.jackson.core.JsonProcessingException; import org.springframework.stereotype.Service; import redis.clients.jedis.Jedis; import redis.clients.jedis.JedisPool; import redis.clients.jedis.JedisPoolConfig; import java.io.IOException; import java.net.URI; import java.util.Set; @Service public class RedisManager { public static final String HASH_PREFIX = "hash:"; public static final String COUNT_FIELD = "$count"; private JedisPool pool; public RedisManager() { JedisPoolConfig jedisPoolConfig = new JedisPoolConfig(); jedisPoolConfig.setMaxTotal(16); if (System.getProperty("redis.password").isEmpty()) { pool = createJedisPoolForTestEnv(jedisPoolConfig); } else { pool = createJedisPoolForProdEnv(jedisPoolConfig); } } public String getHashFor(String key) { try (Jedis jedis = pool.getResource()) { return jedis.get(key); } catch (Exception ex) { ex.printStackTrace(); return null; } } public void setUrlAndHash(String key, String value) { try (Jedis jedis = pool.getResource()) { jedis.set(key, value); } } public ShortenerHandle getHandleFor(String hash) throws IOException { hash = hash.replace(HASH_PREFIX, ""); try (Jedis jedis = pool.getResource()) { String json = jedis.get(HASH_PREFIX + hash); return JsonHelper.unserialize(json); } } public void storeHash(ShortenerHandle shortenerHandle) throws JsonProcessingException { URI url = shortenerHandle.getOriginalURI(); setHashAndHandle(shortenerHandle.getHash(), shortenerHandle); setUrlAndHash(url.toString(), shortenerHandle.getHash()); incrementByOne(COUNT_FIELD); } public void setHashAndHandle(String hash, ShortenerHandle value) throws JsonProcessingException { try (Jedis jedis = pool.getResource()) { jedis.set(HASH_PREFIX + hash, JsonHelper.serialize(value)); } } public Set<String> getHashes() { return getValuesFor(HASH_PREFIX + "*"); } public Long incrementByOne(String key) { try (Jedis jedis = pool.getResource()) { return jedis.incrBy(key, 1); } } public Long getHashCount() { try (Jedis jedis = pool.getResource()) { String shortenedSoFar = jedis.get(COUNT_FIELD); if (shortenedSoFar == null) { return 0L; } else { return Long.parseLong(shortenedSoFar); } } } private Set<String> getValuesFor(String regex) { try (Jedis jedis = pool.getResource()) { return jedis.keys(regex); } } public void clear() { try (Jedis jedis = pool.getResource()) { jedis.flushAll(); } } public void removeHash(String hashToDelete) throws IOException { ShortenerHandle hashHandle = getHandleFor(hashToDelete); URI originalURI = hashHandle.getOriginalURI(); try (Jedis jedis = pool.getResource()) { jedis.del(HASH_PREFIX + hashToDelete); jedis.del(originalURI.toString()); jedis.decrBy(COUNT_FIELD, 1); } } private JedisPool createJedisPoolForProdEnv(JedisPoolConfig jedisPoolConfig) { return new JedisPool(jedisPoolConfig, System.getProperty("redis.host"), Integer.parseInt(System.getProperty("redis.port")), 2000, System.getProperty("redis.password")); } private JedisPool createJedisPoolForTestEnv(JedisPoolConfig jedisPoolConfig) { return new JedisPool(jedisPoolConfig, System.getProperty("redis.host"), Integer.parseInt(System.getProperty("redis.port"))); } }
src/main/java/benjamin/groehbiel/ch/shortener/redis/RedisManager.java
package benjamin.groehbiel.ch.shortener.redis; import benjamin.groehbiel.ch.JsonHelper; import benjamin.groehbiel.ch.shortener.ShortenerHandle; import com.fasterxml.jackson.core.JsonProcessingException; import org.springframework.stereotype.Service; import redis.clients.jedis.Jedis; import redis.clients.jedis.JedisPool; import redis.clients.jedis.JedisPoolConfig; import java.io.IOException; import java.net.URI; import java.util.Set; @Service public class RedisManager { public static final String HASH_PREFIX = "hash:"; public static final String COUNT_FIELD = "$count"; private JedisPool pool; public RedisManager() { JedisPoolConfig jedisPoolConfig = new JedisPoolConfig(); jedisPoolConfig.setMaxTotal(16); pool = new JedisPool(jedisPoolConfig, System.getProperty("redis.host"), Integer.parseInt(System.getProperty("redis.port"))); } public String getHashFor(String key) { try (Jedis jedis = pool.getResource()) { return jedis.get(key); } catch (Exception ex) { ex.printStackTrace(); return null; } } public void setUrlAndHash(String key, String value) { try (Jedis jedis = pool.getResource()) { jedis.set(key, value); } } public ShortenerHandle getHandleFor(String hash) throws IOException { hash = hash.replace(HASH_PREFIX, ""); try (Jedis jedis = pool.getResource()) { String json = jedis.get(HASH_PREFIX + hash); return JsonHelper.unserialize(json); } } public void storeHash(ShortenerHandle shortenerHandle) throws JsonProcessingException { URI url = shortenerHandle.getOriginalURI(); setHashAndHandle(shortenerHandle.getHash(), shortenerHandle); setUrlAndHash(url.toString(), shortenerHandle.getHash()); incrementByOne(COUNT_FIELD); } public void setHashAndHandle(String hash, ShortenerHandle value) throws JsonProcessingException { try (Jedis jedis = pool.getResource()) { jedis.set(HASH_PREFIX + hash, JsonHelper.serialize(value)); } } public Set<String> getHashes() { return getValuesFor(HASH_PREFIX + "*"); } public Long incrementByOne(String key) { try (Jedis jedis = pool.getResource()) { return jedis.incrBy(key, 1); } } public Long getHashCount() { try (Jedis jedis = pool.getResource()) { String shortenedSoFar = jedis.get(COUNT_FIELD); if (shortenedSoFar == null) { return 0L; } else { return Long.parseLong(shortenedSoFar); } } } private Set<String> getValuesFor(String regex) { try (Jedis jedis = pool.getResource()) { return jedis.keys(regex); } } public void clear() { try (Jedis jedis = pool.getResource()) { jedis.flushAll(); } } public void close() { pool.destroy(); } public void removeHash(String hashToDelete) throws IOException { ShortenerHandle hashHandle = getHandleFor(hashToDelete); URI originalURI = hashHandle.getOriginalURI(); try (Jedis jedis = pool.getResource()) { jedis.del(HASH_PREFIX + hashToDelete); jedis.del(originalURI.toString()); jedis.decrBy(COUNT_FIELD, 1); } } }
Fix redis authentication issue in prod
src/main/java/benjamin/groehbiel/ch/shortener/redis/RedisManager.java
Fix redis authentication issue in prod
<ide><path>rc/main/java/benjamin/groehbiel/ch/shortener/redis/RedisManager.java <ide> public RedisManager() { <ide> JedisPoolConfig jedisPoolConfig = new JedisPoolConfig(); <ide> jedisPoolConfig.setMaxTotal(16); <del> pool = new JedisPool(jedisPoolConfig, System.getProperty("redis.host"), Integer.parseInt(System.getProperty("redis.port"))); <add> <add> if (System.getProperty("redis.password").isEmpty()) { <add> pool = createJedisPoolForTestEnv(jedisPoolConfig); <add> } else { <add> pool = createJedisPoolForProdEnv(jedisPoolConfig); <add> } <ide> } <ide> <ide> public String getHashFor(String key) { <ide> } <ide> } <ide> <del> public void close() { <del> pool.destroy(); <del> } <del> <ide> public void removeHash(String hashToDelete) throws IOException { <ide> ShortenerHandle hashHandle = getHandleFor(hashToDelete); <ide> URI originalURI = hashHandle.getOriginalURI(); <ide> jedis.decrBy(COUNT_FIELD, 1); <ide> } <ide> } <add> <add> private JedisPool createJedisPoolForProdEnv(JedisPoolConfig jedisPoolConfig) { <add> return new JedisPool(jedisPoolConfig, System.getProperty("redis.host"), Integer.parseInt(System.getProperty("redis.port")), 2000, System.getProperty("redis.password")); <add> } <add> <add> private JedisPool createJedisPoolForTestEnv(JedisPoolConfig jedisPoolConfig) { <add> return new JedisPool(jedisPoolConfig, System.getProperty("redis.host"), Integer.parseInt(System.getProperty("redis.port"))); <add> } <add> <ide> }
Java
bsd-2-clause
1f99df1e7fb5c0b94a87503d67f27afefbffc3ea
0
stapler/stapler,stapler/stapler,stapler/stapler,stapler/stapler,stapler/stapler
package org.kohsuke.stapler.lang; import org.kohsuke.stapler.Function; import java.net.URL; import java.util.ArrayList; import java.util.Collections; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Abstraction of class-like object, agnostic to languages. * * <p> * To support other JVM languages that use their own specific types to represent a class * (such as JRuby and Jython), we now use this object instead of {@link Class}. This allows * us to reuse much of the logic of class traversal/resource lookup across different languages. * * This is a convenient tuple so that we can pass around a single argument instead of two. * * @param <C> * Variable that represents the type of {@code Class} like object in this language. * * @author Kohsuke Kawaguchi */ public final class Klass<C> { public final C clazz; public final KlassNavigator<C> navigator; public Klass(C clazz, KlassNavigator<C> navigator) { this.clazz = clazz; this.navigator = navigator; } public URL getResource(String resourceName) { return navigator.getResource(clazz,resourceName); } public Iterable<Klass<?>> getAncestors() { return navigator.getAncestors(clazz); } public Klass<?> getSuperClass() { return navigator.getSuperClass(clazz); } public Class toJavaClass() { return navigator.toJavaClass(clazz); } /** * @since 1.220 */ public List<MethodRef> getDeclaredMethods() { return navigator.getDeclaredMethods(clazz); } public List<FieldRef> getDeclaredFields() { try { return navigator.getDeclaredFields(clazz); } catch (AbstractMethodError err) { // A plugin uses obsolete version of Stapler-dependent library (e.g. JRuby), which does not offer the method (JENKINS-39414) // TODO: what to do with Logging? The error must be VERY visible, but it will totally pollute system logs return Collections.emptyList(); } } /** * Gets all the public fields defined in this type, including super types. * * @see Class#getFields() */ public List<FieldRef> getFields() { Map<String,FieldRef> fields = new LinkedHashMap<String,FieldRef>(); for (Klass<?> k = this; k!=null; k=k.getSuperClass()) { for (FieldRef f : k.getDeclaredFields()) { String name = f.getName(); if (!fields.containsKey(name) && f.isRoutable()) { fields.put(name,f); } } } return new ArrayList<FieldRef>(fields.values()); } public List<Function> getFunctions() { return navigator.getFunctions(clazz); } public boolean isArray() { return navigator.isArray(clazz); } public Object getArrayElement(Object o, int index) throws IndexOutOfBoundsException { return navigator.getArrayElement(o,index); } public boolean isMap() { return navigator.isMap(clazz); } public Object getMapElement(Object o, String key) { return navigator.getMapElement(o,key); } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; Klass that = (Klass) o; return clazz.equals(that.clazz) && navigator.equals(that.navigator); } @Override public int hashCode() { return 31 * clazz.hashCode() + navigator.hashCode(); } @Override public String toString() { return clazz.toString(); } /** * Creates {@link Klass} from a Java {@link Class}. */ public static Klass<Class> java(Class c) { return c == null ? null : new Klass<Class>(c, KlassNavigator.JAVA); } }
core/src/main/java/org/kohsuke/stapler/lang/Klass.java
package org.kohsuke.stapler.lang; import org.kohsuke.stapler.Function; import java.net.URL; import java.util.ArrayList; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Abstraction of class-like object, agnostic to languages. * * <p> * To support other JVM languages that use their own specific types to represent a class * (such as JRuby and Jython), we now use this object instead of {@link Class}. This allows * us to reuse much of the logic of class traversal/resource lookup across different languages. * * This is a convenient tuple so that we can pass around a single argument instead of two. * * @param <C> * Variable that represents the type of {@code Class} like object in this language. * * @author Kohsuke Kawaguchi */ public final class Klass<C> { public final C clazz; public final KlassNavigator<C> navigator; public Klass(C clazz, KlassNavigator<C> navigator) { this.clazz = clazz; this.navigator = navigator; } public URL getResource(String resourceName) { return navigator.getResource(clazz,resourceName); } public Iterable<Klass<?>> getAncestors() { return navigator.getAncestors(clazz); } public Klass<?> getSuperClass() { return navigator.getSuperClass(clazz); } public Class toJavaClass() { return navigator.toJavaClass(clazz); } /** * @since 1.220 */ public List<MethodRef> getDeclaredMethods() { return navigator.getDeclaredMethods(clazz); } public List<FieldRef> getDeclaredFields() { return navigator.getDeclaredFields(clazz); } /** * Gets all the public fields defined in this type, including super types. * * @see Class#getFields() */ public List<FieldRef> getFields() { Map<String,FieldRef> fields = new LinkedHashMap<String,FieldRef>(); for (Klass<?> k = this; k!=null; k=k.getSuperClass()) { for (FieldRef f : k.getDeclaredFields()) { String name = f.getName(); if (!fields.containsKey(name) && f.isRoutable()) { fields.put(name,f); } } } return new ArrayList<FieldRef>(fields.values()); } public List<Function> getFunctions() { return navigator.getFunctions(clazz); } public boolean isArray() { return navigator.isArray(clazz); } public Object getArrayElement(Object o, int index) throws IndexOutOfBoundsException { return navigator.getArrayElement(o,index); } public boolean isMap() { return navigator.isMap(clazz); } public Object getMapElement(Object o, String key) { return navigator.getMapElement(o,key); } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; Klass that = (Klass) o; return clazz.equals(that.clazz) && navigator.equals(that.navigator); } @Override public int hashCode() { return 31 * clazz.hashCode() + navigator.hashCode(); } @Override public String toString() { return clazz.toString(); } /** * Creates {@link Klass} from a Java {@link Class}. */ public static Klass<Class> java(Class c) { return c == null ? null : new Klass<Class>(c, KlassNavigator.JAVA); } }
[JENKINS-39414] - Prevent compatibility breakage in Klass#getDeclaredFields() when a Jenkins plugin uses obsolete Stapler lib
core/src/main/java/org/kohsuke/stapler/lang/Klass.java
[JENKINS-39414] - Prevent compatibility breakage in Klass#getDeclaredFields() when a Jenkins plugin uses obsolete Stapler lib
<ide><path>ore/src/main/java/org/kohsuke/stapler/lang/Klass.java <ide> <ide> import java.net.URL; <ide> import java.util.ArrayList; <add>import java.util.Collections; <ide> import java.util.LinkedHashMap; <ide> import java.util.List; <ide> import java.util.Map; <ide> } <ide> <ide> public List<FieldRef> getDeclaredFields() { <del> return navigator.getDeclaredFields(clazz); <add> try { <add> return navigator.getDeclaredFields(clazz); <add> } catch (AbstractMethodError err) { <add> // A plugin uses obsolete version of Stapler-dependent library (e.g. JRuby), which does not offer the method (JENKINS-39414) <add> // TODO: what to do with Logging? The error must be VERY visible, but it will totally pollute system logs <add> return Collections.emptyList(); <add> } <ide> } <ide> <ide> /**
Java
apache-2.0
76cc898413295857db977af9e974b67f851dd6ef
0
buckett/sakai-gitflow,bzhouduke123/sakai,OpenCollabZA/sakai,surya-janani/sakai,lorenamgUMU/sakai,ouit0408/sakai,joserabal/sakai,zqian/sakai,hackbuteer59/sakai,tl-its-umich-edu/sakai,kingmook/sakai,joserabal/sakai,Fudan-University/sakai,rodriguezdevera/sakai,noondaysun/sakai,rodriguezdevera/sakai,puramshetty/sakai,kwedoff1/sakai,puramshetty/sakai,clhedrick/sakai,wfuedu/sakai,buckett/sakai-gitflow,duke-compsci290-spring2016/sakai,duke-compsci290-spring2016/sakai,zqian/sakai,kingmook/sakai,ktakacs/sakai,kwedoff1/sakai,rodriguezdevera/sakai,kingmook/sakai,ktakacs/sakai,rodriguezdevera/sakai,frasese/sakai,hackbuteer59/sakai,liubo404/sakai,tl-its-umich-edu/sakai,conder/sakai,frasese/sakai,liubo404/sakai,udayg/sakai,conder/sakai,colczr/sakai,conder/sakai,surya-janani/sakai,conder/sakai,zqian/sakai,kwedoff1/sakai,udayg/sakai,wfuedu/sakai,kwedoff1/sakai,frasese/sakai,bzhouduke123/sakai,surya-janani/sakai,OpenCollabZA/sakai,surya-janani/sakai,conder/sakai,whumph/sakai,Fudan-University/sakai,hackbuteer59/sakai,OpenCollabZA/sakai,colczr/sakai,willkara/sakai,whumph/sakai,wfuedu/sakai,udayg/sakai,buckett/sakai-gitflow,pushyamig/sakai,noondaysun/sakai,OpenCollabZA/sakai,liubo404/sakai,wfuedu/sakai,puramshetty/sakai,pushyamig/sakai,puramshetty/sakai,buckett/sakai-gitflow,joserabal/sakai,puramshetty/sakai,bkirschn/sakai,noondaysun/sakai,OpenCollabZA/sakai,whumph/sakai,bzhouduke123/sakai,introp-software/sakai,bkirschn/sakai,rodriguezdevera/sakai,joserabal/sakai,noondaysun/sakai,tl-its-umich-edu/sakai,frasese/sakai,buckett/sakai-gitflow,liubo404/sakai,frasese/sakai,bzhouduke123/sakai,udayg/sakai,ouit0408/sakai,lorenamgUMU/sakai,willkara/sakai,willkara/sakai,frasese/sakai,conder/sakai,pushyamig/sakai,liubo404/sakai,tl-its-umich-edu/sakai,puramshetty/sakai,clhedrick/sakai,puramshetty/sakai,ktakacs/sakai,rodriguezdevera/sakai,ouit0408/sakai,frasese/sakai,bzhouduke123/sakai,kwedoff1/sakai,duke-compsci290-spring2016/sakai,frasese/sakai,clhedrick/sakai,introp-software/sakai,introp-software/sakai,bkirschn/sakai,duke-compsci290-spring2016/sakai,introp-software/sakai,willkara/sakai,willkara/sakai,Fudan-University/sakai,bzhouduke123/sakai,liubo404/sakai,kwedoff1/sakai,Fudan-University/sakai,noondaysun/sakai,tl-its-umich-edu/sakai,OpenCollabZA/sakai,colczr/sakai,surya-janani/sakai,introp-software/sakai,OpenCollabZA/sakai,bkirschn/sakai,clhedrick/sakai,hackbuteer59/sakai,liubo404/sakai,clhedrick/sakai,introp-software/sakai,kingmook/sakai,hackbuteer59/sakai,hackbuteer59/sakai,joserabal/sakai,noondaysun/sakai,udayg/sakai,kingmook/sakai,clhedrick/sakai,colczr/sakai,OpenCollabZA/sakai,whumph/sakai,introp-software/sakai,colczr/sakai,kingmook/sakai,zqian/sakai,buckett/sakai-gitflow,lorenamgUMU/sakai,zqian/sakai,kwedoff1/sakai,udayg/sakai,bkirschn/sakai,duke-compsci290-spring2016/sakai,duke-compsci290-spring2016/sakai,hackbuteer59/sakai,pushyamig/sakai,pushyamig/sakai,surya-janani/sakai,wfuedu/sakai,buckett/sakai-gitflow,Fudan-University/sakai,ktakacs/sakai,kingmook/sakai,zqian/sakai,rodriguezdevera/sakai,lorenamgUMU/sakai,willkara/sakai,puramshetty/sakai,noondaysun/sakai,clhedrick/sakai,bkirschn/sakai,kwedoff1/sakai,udayg/sakai,bkirschn/sakai,lorenamgUMU/sakai,introp-software/sakai,ktakacs/sakai,colczr/sakai,colczr/sakai,zqian/sakai,ktakacs/sakai,joserabal/sakai,whumph/sakai,bkirschn/sakai,lorenamgUMU/sakai,whumph/sakai,lorenamgUMU/sakai,duke-compsci290-spring2016/sakai,tl-its-umich-edu/sakai,liubo404/sakai,ktakacs/sakai,conder/sakai,tl-its-umich-edu/sakai,hackbuteer59/sakai,surya-janani/sakai,pushyamig/sakai,wfuedu/sakai,Fuda
n-University/sakai,Fudan-University/sakai,udayg/sakai,ktakacs/sakai,ouit0408/sakai,buckett/sakai-gitflow,rodriguezdevera/sakai,Fudan-University/sakai,noondaysun/sakai,whumph/sakai,whumph/sakai,ouit0408/sakai,pushyamig/sakai,ouit0408/sakai,duke-compsci290-spring2016/sakai,wfuedu/sakai,surya-janani/sakai,bzhouduke123/sakai,kingmook/sakai,ouit0408/sakai,ouit0408/sakai,tl-its-umich-edu/sakai,joserabal/sakai,lorenamgUMU/sakai,wfuedu/sakai,willkara/sakai,zqian/sakai,bzhouduke123/sakai,joserabal/sakai,pushyamig/sakai,clhedrick/sakai,conder/sakai,willkara/sakai,colczr/sakai
/********************************************************************************** * $URL$ * $Id$ *********************************************************************************** * * Copyright (c) 2005 The Regents of the University of Michigan, Trustees of Indiana University, * Board of Trustees of the Leland Stanford, Jr., University, and The MIT Corporation * * Licensed under the Educational Community License Version 1.0 (the "License"); * By obtaining, using and/or copying this Original Work, you agree that you have read, * understand, and will comply with the terms and conditions of the Educational Community License. * You may obtain a copy of the License at: * * http://cvs.sakaiproject.org/licenses/license_1_0.html * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, * INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE * AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. * **********************************************************************************/ package org.sakaiproject.component.app.messageforums; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.sakaiproject.api.app.messageforums.Area; public class MessageForumsAreaManagerImpl {//implements MessageForumsAreaManager { private static final Log LOG = LogFactory.getLog(MessageForumsAreaManagerImpl.class); public boolean isPrivateAreaEnabled() { return false; } public Area getPrivateArea() { return null; } public Area getDiscussionForumArea() { return null; } }
msgcntr/messageforums-component-shared/src/java/org/sakaiproject/component/app/messageforums/MessageForumsAreaManagerImpl.java
/********************************************************************************** * $URL$ * $Id$ *********************************************************************************** * * Copyright (c) 2005 The Regents of the University of Michigan, Trustees of Indiana University, * Board of Trustees of the Leland Stanford, Jr., University, and The MIT Corporation * * Licensed under the Educational Community License Version 1.0 (the "License"); * By obtaining, using and/or copying this Original Work, you agree that you have read, * understand, and will comply with the terms and conditions of the Educational Community License. * You may obtain a copy of the License at: * * http://cvs.sakaiproject.org/licenses/license_1_0.html * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, * INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE * AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. * **********************************************************************************/ package org.sakaiproject.component.app.messageforums; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.sakaiproject.api.app.messageforums.Area; import org.sakaiproject.api.app.messageforums.MessageForumsAreaManager; public class MessageForumsAreaManagerImpl implements MessageForumsAreaManager { private static final Log LOG = LogFactory.getLog(MessageForumsAreaManagerImpl.class); public boolean isPrivateAreaEnabled() { return false; } public Area getPrivateArea() { return null; } public Area getDiscussionForumArea() { return null; } }
New managers clean up remove type etc git-svn-id: c7b2716e7381eb8d90b7fc9f797026a6a3c5b5ba@3459 66ffb92e-73f9-0310-93c1-f5514f145a0a
msgcntr/messageforums-component-shared/src/java/org/sakaiproject/component/app/messageforums/MessageForumsAreaManagerImpl.java
New managers clean up remove type etc
<ide><path>sgcntr/messageforums-component-shared/src/java/org/sakaiproject/component/app/messageforums/MessageForumsAreaManagerImpl.java <ide> import org.apache.commons.logging.Log; <ide> import org.apache.commons.logging.LogFactory; <ide> import org.sakaiproject.api.app.messageforums.Area; <del>import org.sakaiproject.api.app.messageforums.MessageForumsAreaManager; <ide> <del>public class MessageForumsAreaManagerImpl implements MessageForumsAreaManager { <add> <add>public class MessageForumsAreaManagerImpl {//implements MessageForumsAreaManager { <ide> <ide> private static final Log LOG = LogFactory.getLog(MessageForumsAreaManagerImpl.class); <ide>
Java
mit
8fff1c2a111edd2f3caabbeb852ce29afc222cd0
0
project-recoin/PybossaTwitterController,project-recoin/PybossaTwitterController
package sociam.pybossa.twitter; import java.util.List; import sociam.pybossa.util.TwitterAccount; import twitter4j.Paging; import twitter4j.Status; import twitter4j.Twitter; import twitter4j.TwitterException; public class DeleteTweets { public static void main(String[] args) throws InterruptedException { Boolean res = removeTweets(); if (res == false) { removeTweets(); } else if (res == true) { System.out.println("All tweets are deleted"); } else { System.err.println("Error, exiting the script!!"); } } public static Boolean removeTweets() { Twitter twitter = TwitterAccount.setTwitterAccount(2); try { Paging p = new Paging(); p.setCount(200); List<Status> statuses = twitter.getUserTimeline(p); while (statuses != null) { for (Status status : statuses) { long id = status.getId(); twitter.destroyStatus(id); System.out.println("deleted"); Thread.sleep(5000); } System.out.println("Waiting 15 minutes before getting 200 responses"); statuses = twitter.getHomeTimeline(); Thread.sleep(900000); } return true; } catch (TwitterException e) { e.printStackTrace(); if (e.exceededRateLimitation()) { try { System.err.println("Twitter rate limit is exceeded!"); int waitfor = e.getRateLimitStatus().getSecondsUntilReset(); System.err.println("Waiting for " + (waitfor + 100) + " seconds"); Thread.sleep((waitfor * 1000) + 100000); removeTweets(); } catch (InterruptedException e2) { // TODO Auto-generated catch block e2.printStackTrace(); } } return null; } catch (InterruptedException e) { // TODO Auto-generated catch block e.printStackTrace(); return null; } } }
src/main/java/sociam/pybossa/twitter/DeleteTweets.java
package sociam.pybossa.twitter; import java.util.List; import sociam.pybossa.util.TwitterAccount; import twitter4j.Paging; import twitter4j.Status; import twitter4j.Twitter; import twitter4j.TwitterException; public class DeleteTweets { public static void main(String[] args) throws InterruptedException { Boolean res = removeTweets(); if (res == false) { removeTweets(); } else if (res == true) { System.out.println("All tweets are deleted"); } else { System.err.println("Error, exiting the script!!"); } } public static Boolean removeTweets() { Twitter twitter = TwitterAccount.setTwitterAccount(2); try { Paging p = new Paging(); p.setCount(200); List<Status> statuses = twitter.getHomeTimeline(p); while (statuses != null) { for (Status status : statuses) { long id = status.getId(); twitter.destroyStatus(id); System.out.println("deleted"); Thread.sleep(5000); } System.out.println("Waiting 15 minutes before getting 200 responses"); statuses = twitter.getHomeTimeline(); Thread.sleep(900000); } return true; } catch (TwitterException e) { e.printStackTrace(); if (e.exceededRateLimitation()) { try { System.err.println("Twitter rate limit is exceeded!"); int waitfor = e.getRateLimitStatus().getSecondsUntilReset(); System.err.println("Waiting for " + (waitfor + 100) + " seconds"); Thread.sleep((waitfor * 1000) + 100000); removeTweets(); } catch (InterruptedException e2) { // TODO Auto-generated catch block e2.printStackTrace(); } } return null; } catch (InterruptedException e) { // TODO Auto-generated catch block e.printStackTrace(); return null; } } }
no message
src/main/java/sociam/pybossa/twitter/DeleteTweets.java
no message
<ide><path>rc/main/java/sociam/pybossa/twitter/DeleteTweets.java <ide> try { <ide> Paging p = new Paging(); <ide> p.setCount(200); <del> List<Status> statuses = twitter.getHomeTimeline(p); <add> List<Status> statuses = twitter.getUserTimeline(p); <ide> <ide> while (statuses != null) { <ide> for (Status status : statuses) {
Java
apache-2.0
b97ae495e4c8c6ac50ff686c050c7f9cd3abaf05
0
gunnarmorling/aerogear-unifiedpush-server,baiwyc119/aerogear-unifiedpush-server,lfryc/aerogear-unifiedpush-server,diogoalbuquerque/aerogear-unifiedpush-server,danielpassos/aerogear-unifiedpush-server,matzew/aerogear-unifiedpush-server,sinarz/aerogear-unifiedpush-server,qmx/aerogear-unifiedpush-server,fheng/aerogear-unifiedpush-server,aerogear/aerogear-unifiedpush-server,lholmquist/aerogear-unified-push-server,baiwyc119/aerogear-unifiedpush-server,lholmquist/aerogear-unified-push-server,yvnicolas/aerogear-unifiedpush-server,yvnicolas/aerogear-unifiedpush-server,andresgalante/aerogear-unifiedpush-server,matzew/aerogear-unifiedpush-server,matzew/aerogear-unifiedpush-server,IvanGurtler/aerogear-unifiedpush-server,andresgalante/aerogear-unifiedpush-server,danielpassos/aerogear-unifiedpush-server,abstractj/aerogear-unifiedpush-server,edewit/aerogear-unifiedpush-server,yvnicolas/aerogear-unifiedpush-server,thradec/aerogear-unifiedpush-server,fheng/aerogear-unifiedpush-server,lfryc/aerogear-unifiedpush-server,lholmquist/aerogear-unified-push-server,andresgalante/aerogear-unifiedpush-server,julioa/aerogear-unifiedpush-server,baiwyc119/aerogear-unifiedpush-server,abstractj/aerogear-unifiedpush-server,baiwyc119/aerogear-unifiedpush-server,lfryc/aerogear-unifiedpush-server,diogoalbuquerque/aerogear-unifiedpush-server,aerobase/unifiedpush-server,aerobase/unifiedpush-server,lfryc/aerogear-unifiedpush-server,abstractj/aerogear-unifiedpush-server,fheng/aerogear-unifiedpush-server,sinarz/aerogear-unifiedpush-server,andresgalante/aerogear-unifiedpush-server,abstractj/aerogear-unifiedpush-server,sinarz/aerogear-unifiedpush-server,C-B4/unifiedpush-server,qmx/aerogear-unifiedpush-server,aerobase/unifiedpush-server,julioa/aerogear-unifiedpush-server,gunnarmorling/aerogear-unifiedpush-server,lholmquist/aerogear-unified-push-server,matzew/aerogear-unifiedpush-server,edewit/aerogear-unifiedpush-server,secondsun/aerogear-unifiedpush-server,secondsun/aerogear-unifiedpush-server,lholmquist/aerogear-unified-push-server,abstractj/aerogear-unifiedpush-server,julioa/aerogear-unifiedpush-server,C-B4/unifiedpush-server,idelpivnitskiy/aerogear-unifiedpush-server,secondsun/aerogear-unifiedpush-server,yvnicolas/aerogear-unifiedpush-server,edewit/aerogear-unifiedpush-server,aerogear/aerogear-unifiedpush-server,julioa/aerogear-unifiedpush-server,andresgalante/aerogear-unifiedpush-server,idelpivnitskiy/aerogear-unifiedpush-server,thradec/aerogear-unifiedpush-server,fheng/aerogear-unifiedpush-server,IvanGurtler/aerogear-unifiedpush-server,matzew/aerogear-unifiedpush-server,sinarz/aerogear-unifiedpush-server,thradec/aerogear-unifiedpush-server,thradec/aerogear-unifiedpush-server,idelpivnitskiy/aerogear-unifiedpush-server,abstractj/aerogear-unifiedpush-server,lfryc/aerogear-unifiedpush-server,lfryc/aerogear-unifiedpush-server,edewit/aerogear-unifiedpush-server,andresgalante/aerogear-unifiedpush-server,danielpassos/aerogear-unifiedpush-server,aerogear/aerogear-unifiedpush-server,julioa/aerogear-unifiedpush-server,IvanGurtler/aerogear-unifiedpush-server,sinarz/aerogear-unifiedpush-server,matzew/aerogear-unifiedpush-server,julioa/aerogear-unifiedpush-server,edewit/aerogear-unifiedpush-server,danielpassos/aerogear-unifiedpush-server,sinarz/aerogear-unifiedpush-server,C-B4/unifiedpush-server,edewit/aerogear-unifiedpush-server,danielpassos/aerogear-unifiedpush-server,idelpivnitskiy/aerogear-unifiedpush-server,gunnarmorling/aerogear-unifiedpush-server,danielpassos/aerogear-unifiedpush-server,yvnicolas/aerogear-unifiedpush-server,baiwyc119/
aerogear-unifiedpush-server,lholmquist/aerogear-unified-push-server,C-B4/unifiedpush-server,fheng/aerogear-unifiedpush-server,thradec/aerogear-unifiedpush-server,yvnicolas/aerogear-unifiedpush-server,thradec/aerogear-unifiedpush-server,idelpivnitskiy/aerogear-unifiedpush-server,idelpivnitskiy/aerogear-unifiedpush-server,baiwyc119/aerogear-unifiedpush-server,fheng/aerogear-unifiedpush-server,diogoalbuquerque/aerogear-unifiedpush-server
/** * JBoss, Home of Professional Open Source * Copyright Red Hat, Inc., and individual contributors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jboss.aerogear.unifiedpush.rest.security; import org.jboss.aerogear.unifiedpush.users.Developer; import org.jboss.aerogear.security.auth.AuthenticationManager; import org.jboss.aerogear.security.authz.IdentityManagement; import org.jboss.aerogear.security.exception.AeroGearSecurityException; import org.jboss.aerogear.security.picketlink.auth.CredentialMatcher; import org.picketlink.idm.model.basic.Agent; import org.picketlink.idm.model.basic.User; import javax.ejb.Stateless; import javax.inject.Inject; import javax.ws.rs.Consumes; import javax.ws.rs.POST; import javax.ws.rs.PUT; import javax.ws.rs.Path; import javax.ws.rs.Produces; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import javax.ws.rs.core.Response.Status; @Stateless @Path("/auth") public class AuthenticationEndpoint { @Inject private AuthenticationManager<Agent> authenticationManager; @Inject private CredentialMatcher credential; @Inject private IdentityManagement configuration; @POST @Path("/login") @Consumes(MediaType.APPLICATION_JSON) @Produces(MediaType.APPLICATION_JSON) public Response login(final Developer developer) { authenticationManager.login(developer, developer.getPassword()); return Response.ok().build(); } @POST @Path("/logout") public Response logout() { try { authenticationManager.logout(); } catch (AeroGearSecurityException agse) { return Response.status(Status.UNAUTHORIZED).build(); } return Response.ok().build(); } @PUT @Path("/update") public Response updateUserPasswordAndRole(final Developer developer) { User simpleUser = (User) configuration.findByUsername(developer.getLoginName()); configuration.reset(simpleUser, developer.getPassword(), developer.getNewPassword()); return Response.ok().build(); } }
src/main/java/org/jboss/aerogear/unifiedpush/rest/security/AuthenticationEndpoint.java
/** * JBoss, Home of Professional Open Source * Copyright Red Hat, Inc., and individual contributors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jboss.aerogear.unifiedpush.rest.security; import org.jboss.aerogear.unifiedpush.users.Developer; import org.jboss.aerogear.security.auth.AuthenticationManager; import org.jboss.aerogear.security.authz.IdentityManagement; import org.jboss.aerogear.security.exception.AeroGearSecurityException; import org.jboss.aerogear.security.picketlink.auth.CredentialMatcher; import org.picketlink.idm.model.basic.User; import javax.ejb.Stateless; import javax.inject.Inject; import javax.ws.rs.Consumes; import javax.ws.rs.POST; import javax.ws.rs.PUT; import javax.ws.rs.Path; import javax.ws.rs.Produces; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import javax.ws.rs.core.Response.Status; @Stateless @Path("/auth") public class AuthenticationEndpoint { @Inject private AuthenticationManager authenticationManager; @Inject private CredentialMatcher credential; @Inject private IdentityManagement configuration; @POST @Path("/login") @Consumes(MediaType.APPLICATION_JSON) @Produces(MediaType.APPLICATION_JSON) public Response login(final Developer developer) { authenticationManager.login(developer, developer.getPassword()); return Response.ok().build(); } @POST @Path("/logout") public Response logout() { try { authenticationManager.logout(); } catch (AeroGearSecurityException agse) { return Response.status(Status.UNAUTHORIZED).build(); } return Response.ok().build(); } @PUT @Path("/update") public Response updateUserPasswordAndRole(final Developer developer) { User simpleUser = (User) configuration.findByUsername(developer.getLoginName()); configuration.reset(simpleUser, developer.getPassword(), developer.getNewPassword()); return Response.ok().build(); } }
Using Agent as parameterized type for the AuthenticationManager injection point
src/main/java/org/jboss/aerogear/unifiedpush/rest/security/AuthenticationEndpoint.java
Using Agent as parameterized type for the AuthenticationManager injection point
<ide><path>rc/main/java/org/jboss/aerogear/unifiedpush/rest/security/AuthenticationEndpoint.java <ide> import org.jboss.aerogear.security.authz.IdentityManagement; <ide> import org.jboss.aerogear.security.exception.AeroGearSecurityException; <ide> import org.jboss.aerogear.security.picketlink.auth.CredentialMatcher; <add>import org.picketlink.idm.model.basic.Agent; <ide> import org.picketlink.idm.model.basic.User; <ide> <ide> import javax.ejb.Stateless; <ide> public class AuthenticationEndpoint { <ide> <ide> @Inject <del> private AuthenticationManager authenticationManager; <add> private AuthenticationManager<Agent> authenticationManager; <add> <ide> @Inject <ide> private CredentialMatcher credential; <ide> @Inject
Java
apache-2.0
bf5963598702bad563b346005012b2f4788f9be4
0
innovateme/ICampGeoFence
package com.example.icampgeofence; import java.util.ArrayList; import java.util.List; import android.app.Activity; import android.app.PendingIntent; import android.content.Context; import android.content.Intent; import android.content.IntentSender; import android.location.Location; import android.location.LocationManager; import android.location.LocationProvider; import android.os.Bundle; import android.util.Log; import android.widget.Toast; import com.google.android.gms.common.ConnectionResult; import com.google.android.gms.common.GooglePlayServicesClient; import com.google.android.gms.common.GooglePlayServicesUtil; import com.google.android.gms.location.Geofence; import com.google.android.gms.location.LocationClient; import com.google.android.gms.location.LocationClient.OnAddGeofencesResultListener; import com.google.android.gms.location.LocationClient.OnRemoveGeofencesResultListener; import com.google.android.gms.location.LocationStatusCodes; public class LocationMgr implements GooglePlayServicesClient.ConnectionCallbacks, GooglePlayServicesClient.OnConnectionFailedListener { public static final String TRANSITION_INTENT_ACTION = "geofence_transition"; /* * Define a request code to send to Google Play services * This code is returned in Activity.onActivityResult */ private final static int CONNECTION_FAILURE_RESOLUTION_REQUEST = 9000; private final static int REQUEST_CODE_RECOVER_PLAY_SERVICES = 1001; private final Activity parentActivity; private LocationClient locationClient = null; // Stores the PendingIntent used to request geofence monitoring private PendingIntent geofenceRequestIntent; // Flag that indicates if a request is underway. private boolean inProgress = false; public interface OnDeleteFenceListener { void onDeleteFence(String id); } public LocationMgr(Activity parent) { parentActivity = parent; /* * Create a new location client, using the enclosing class to * handle callbacks. */ locationClient = new LocationClient(parentActivity, this, this); } public LocationClient getClient() { return locationClient; } protected void connect() { // Connect the client. try { locationClient.connect(); } catch (Exception e) { Log.e("LocationMgr could not connect.", e.getMessage(), e); } } protected void disconnect() { // Disconnecting the client invalidates it. locationClient.disconnect(); } public void setMockLocation(double latitude, double longitude, float accuracy) { LocationManager lm = (LocationManager) parentActivity.getSystemService(Context.LOCATION_SERVICE); lm.addTestProvider(LocationManager.GPS_PROVIDER, "requiresNetwork" == "", "requiresSatellite" == "", "requiresCell" == "", "hasMonetaryCost" == "", "supportsAltitude" == "", "supportsSpeed" == "", "supportsBearing" == "", android.location.Criteria.NO_REQUIREMENT, android.location.Criteria.ACCURACY_FINE); Location newLocation = new Location(LocationManager.GPS_PROVIDER); newLocation.setLatitude(latitude); newLocation.setLongitude(longitude); newLocation.setAccuracy(accuracy); lm.setTestProviderEnabled(LocationManager.GPS_PROVIDER, true); lm.setTestProviderStatus(LocationManager.GPS_PROVIDER, LocationProvider.AVAILABLE, null,System.currentTimeMillis()); lm.setTestProviderLocation(LocationManager.GPS_PROVIDER, newLocation); } /* * Called by Location Services when the request to connect the * client finishes successfully. 
At this point, you can * request the current location or start periodic updates */ @Override public void onConnected(Bundle dataBundle) { // Display the connection status Toast.makeText(parentActivity, "Connected", Toast.LENGTH_SHORT).show(); } /* * Called by Location Services if the connection to the * location client drops because of an error. */ @Override public void onDisconnected() { // Display the connection status Toast.makeText(parentActivity, "Disconnected. Please re-connect.", Toast.LENGTH_SHORT).show(); } /* * Called by Location Services if the attempt to * Location Services fails. */ @Override public void onConnectionFailed(ConnectionResult connectionResult) { /* * Google Play services can resolve some errors it detects. * If the error has a resolution, try sending an Intent to * start a Google Play services activity that can resolve * error. */ if (connectionResult.hasResolution()) { try { // Start an Activity that tries to resolve the error connectionResult.startResolutionForResult( parentActivity, CONNECTION_FAILURE_RESOLUTION_REQUEST); /* * Thrown if Google Play services canceled the original * PendingIntent */ } catch (IntentSender.SendIntentException e) { // Log the error e.printStackTrace(); } } else { /* * If no resolution is available, display a dialog to the * user with the error. */ showErrorDialog(connectionResult.getErrorCode()); } } void showErrorDialog(int code) { GooglePlayServicesUtil.getErrorDialog(code, parentActivity, REQUEST_CODE_RECOVER_PLAY_SERVICES).show(); } /* * Create a PendingIntent that triggers an IntentService in your * app when a geofence transition occurs. */ private PendingIntent getTransitionPendingIntent() { // Create an explicit Intent Intent intent = new Intent(parentActivity, ReceiveTransitionsIntentService.class); intent.setAction(TRANSITION_INTENT_ACTION); /* * Return the PendingIntent */ return PendingIntent.getService( parentActivity, 0, intent, PendingIntent.FLAG_UPDATE_CURRENT); } public void addGeofences(final List<Fence> fenceList, final OnAddGeofencesResultListener listener) { List<Geofence> gfList = new ArrayList<Geofence>(); // create a Geofence from each Fence for (Fence f : fenceList) { gfList.add(f.asGeofence()); } // get pending intent for geofence transitions geofenceRequestIntent = getTransitionPendingIntent(); // Send a request to add the current geofences locationClient.addGeofences(gfList, geofenceRequestIntent, new OnAddGeofencesResultListener() { @Override public void onAddGeofencesResult(int statusCode, String[] geofenceRequestIds) { // if successful, persist the new Fence if (LocationStatusCodes.SUCCESS == statusCode) { FenceMgr.getDefault().add(fenceList); listener.onAddGeofencesResult(statusCode, geofenceRequestIds); Toast.makeText(parentActivity, "Added " + fenceList.size() + " new geofences.", Toast.LENGTH_SHORT).show(); } else { // If adding the geofences failed /* * Report errors here. * You can log the error using Log.e() or update * the UI. 
*/ } // Turn off the in progress flag inProgress = false; } }); } public void removeAllGeofences(final OnRemoveGeofencesResultListener listener) { if (!FenceMgr.getDefault().getFences().isEmpty()) { removeGeofences(FenceMgr.getDefault().getFences(), listener); } } public void removeGeofences(final List<Fence> fenceList, final OnRemoveGeofencesResultListener listener) { List<String> gfList = new ArrayList<String>(); for (Fence f : fenceList) { gfList.add(f.getId()); } locationClient.removeGeofences(gfList, new OnRemoveGeofencesResultListener() { @Override public void onRemoveGeofencesByRequestIdsResult(int statusCode, String[] geofenceRequestIds) { // if successful, persist the change if (LocationStatusCodes.SUCCESS == statusCode) { FenceMgr.getDefault().delete(fenceList); listener.onRemoveGeofencesByRequestIdsResult(statusCode, geofenceRequestIds); Toast.makeText(parentActivity, "Removed " + geofenceRequestIds.length + " geofences.", Toast.LENGTH_SHORT).show(); } else { // If adding the geofences failed /* * Report errors here. * You can log the error using Log.e() or update * the UI. */ } // Turn off the in progress flag inProgress = false; } @Override public void onRemoveGeofencesByPendingIntentResult(int arg0, PendingIntent arg1) { // TODO Auto-generated method stub } }); } public void addGeofence(final Fence fence) { // get pending intent for geofence transitions geofenceRequestIntent = getTransitionPendingIntent(); // create new Geofence from Fence and add to play services Geofence gf = fence.asGeofence(); List<Geofence> gfList = new ArrayList<Geofence>(); gfList.add(gf); // Send a request to add the current geofences locationClient.addGeofences(gfList, geofenceRequestIntent, new OnAddGeofencesResultListener() { @Override public void onAddGeofencesResult(int statusCode, String[] geofenceRequestIds) { // if successful, persist the new Fence if (LocationStatusCodes.SUCCESS == statusCode) { FenceMgr.getDefault().add(fence); Toast.makeText(parentActivity, "Added new geofence named " + fence.getName(), Toast.LENGTH_SHORT).show(); } else if (LocationStatusCodes.GEOFENCE_NOT_AVAILABLE == statusCode) { // If adding the geofences failed Toast.makeText(parentActivity, "Error: Location Access turned off in Settings", Toast.LENGTH_SHORT).show(); } else if (LocationStatusCodes.GEOFENCE_TOO_MANY_GEOFENCES == statusCode) { // If adding the geofences failed Toast.makeText(parentActivity, "Error: Geofence limit exceeded" + fence.getName() + ", Code:" + statusCode, Toast.LENGTH_SHORT).show(); } else { // If adding the geofences failed Toast.makeText(parentActivity, "Error: Geofence not added", Toast.LENGTH_SHORT).show(); } // Turn off the in progress flag inProgress = false; } }); } public void removeGeofence(final Fence fence, final OnDeleteFenceListener listener) { List<String> gfList = new ArrayList<String>(); gfList.add(fence.getId()); locationClient.removeGeofences(gfList, new OnRemoveGeofencesResultListener() { @Override public void onRemoveGeofencesByRequestIdsResult(int statusCode, String[] geofenceRequestIds) { // if successful, persist the change if (LocationStatusCodes.SUCCESS == statusCode) { FenceMgr.getDefault().delete(fence); listener.onDeleteFence(fence.getId()); Toast.makeText(parentActivity, "Removed geofence named " + fence.getName(), Toast.LENGTH_SHORT).show(); } else if (LocationStatusCodes.GEOFENCE_NOT_AVAILABLE == statusCode) { // If adding the geofences failed Toast.makeText(parentActivity, "Error: Location Access turned off in Settings", Toast.LENGTH_SHORT).show(); } else { // 
If adding the geofences failed Toast.makeText(parentActivity, "Error: Geofence not removed", Toast.LENGTH_SHORT).show(); } // Turn off the in progress flag inProgress = false; } @Override public void onRemoveGeofencesByPendingIntentResult(int arg0, PendingIntent arg1) { // TODO Auto-generated method stub } }); } }
src/com/example/icampgeofence/LocationMgr.java
package com.example.icampgeofence; import java.util.ArrayList; import java.util.List; import android.app.Activity; import android.app.PendingIntent; import android.content.Context; import android.content.Intent; import android.content.IntentSender; import android.location.Location; import android.location.LocationManager; import android.location.LocationProvider; import android.os.Bundle; import android.util.Log; import android.widget.Toast; import com.google.android.gms.common.ConnectionResult; import com.google.android.gms.common.GooglePlayServicesClient; import com.google.android.gms.common.GooglePlayServicesUtil; import com.google.android.gms.location.Geofence; import com.google.android.gms.location.LocationClient; import com.google.android.gms.location.LocationClient.OnAddGeofencesResultListener; import com.google.android.gms.location.LocationClient.OnRemoveGeofencesResultListener; import com.google.android.gms.location.LocationStatusCodes; public class LocationMgr implements GooglePlayServicesClient.ConnectionCallbacks, GooglePlayServicesClient.OnConnectionFailedListener { public static final String TRANSITION_INTENT_ACTION = "geofence_transition"; /* * Define a request code to send to Google Play services * This code is returned in Activity.onActivityResult */ private final static int CONNECTION_FAILURE_RESOLUTION_REQUEST = 9000; private final static int REQUEST_CODE_RECOVER_PLAY_SERVICES = 1001; private final Activity parentActivity; private LocationClient locationClient = null; // Stores the PendingIntent used to request geofence monitoring private PendingIntent geofenceRequestIntent; // Flag that indicates if a request is underway. private boolean inProgress = false; public interface OnDeleteFenceListener { void onDeleteFence(String id); } public LocationMgr(Activity parent) { parentActivity = parent; /* * Create a new location client, using the enclosing class to * handle callbacks. */ locationClient = new LocationClient(parentActivity, this, this); } public LocationClient getClient() { return locationClient; } protected void connect() { // Connect the client. try { locationClient.connect(); } catch (Exception e) { Log.e("LocationMgr could not connect.", e.getMessage(), e); } } protected void disconnect() { // Disconnecting the client invalidates it. locationClient.disconnect(); } public void setMockLocation(double latitude, double longitude, float accuracy) { LocationManager lm = (LocationManager) parentActivity.getSystemService(Context.LOCATION_SERVICE); lm.addTestProvider(LocationManager.GPS_PROVIDER, "requiresNetwork" == "", "requiresSatellite" == "", "requiresCell" == "", "hasMonetaryCost" == "", "supportsAltitude" == "", "supportsSpeed" == "", "supportsBearing" == "", android.location.Criteria.NO_REQUIREMENT, android.location.Criteria.ACCURACY_FINE); Location newLocation = new Location(LocationManager.GPS_PROVIDER); newLocation.setLatitude(latitude); newLocation.setLongitude(longitude); newLocation.setAccuracy(accuracy); lm.setTestProviderEnabled(LocationManager.GPS_PROVIDER, true); lm.setTestProviderStatus(LocationManager.GPS_PROVIDER, LocationProvider.AVAILABLE, null,System.currentTimeMillis()); lm.setTestProviderLocation(LocationManager.GPS_PROVIDER, newLocation); } /* * Called by Location Services when the request to connect the * client finishes successfully. 
At this point, you can * request the current location or start periodic updates */ @Override public void onConnected(Bundle dataBundle) { // Display the connection status Toast.makeText(parentActivity, "Connected", Toast.LENGTH_SHORT).show(); } /* * Called by Location Services if the connection to the * location client drops because of an error. */ @Override public void onDisconnected() { // Display the connection status Toast.makeText(parentActivity, "Disconnected. Please re-connect.", Toast.LENGTH_SHORT).show(); } /* * Called by Location Services if the attempt to * Location Services fails. */ @Override public void onConnectionFailed(ConnectionResult connectionResult) { /* * Google Play services can resolve some errors it detects. * If the error has a resolution, try sending an Intent to * start a Google Play services activity that can resolve * error. */ if (connectionResult.hasResolution()) { try { // Start an Activity that tries to resolve the error connectionResult.startResolutionForResult( parentActivity, CONNECTION_FAILURE_RESOLUTION_REQUEST); /* * Thrown if Google Play services canceled the original * PendingIntent */ } catch (IntentSender.SendIntentException e) { // Log the error e.printStackTrace(); } } else { /* * If no resolution is available, display a dialog to the * user with the error. */ showErrorDialog(connectionResult.getErrorCode()); } } void showErrorDialog(int code) { GooglePlayServicesUtil.getErrorDialog(code, parentActivity, REQUEST_CODE_RECOVER_PLAY_SERVICES).show(); } /* * Create a PendingIntent that triggers an IntentService in your * app when a geofence transition occurs. */ private PendingIntent getTransitionPendingIntent() { // Create an explicit Intent Intent intent = new Intent(parentActivity, ReceiveTransitionsIntentService.class); intent.setAction(TRANSITION_INTENT_ACTION); /* * Return the PendingIntent */ return PendingIntent.getService( parentActivity, 0, intent, PendingIntent.FLAG_UPDATE_CURRENT); } public void addGeofences(final List<Fence> fenceList, final OnAddGeofencesResultListener listener) { List<Geofence> gfList = new ArrayList<Geofence>(); // create a Geofence from each Fence for (Fence f : fenceList) { gfList.add(f.asGeofence()); } // get pending intent for geofence transitions geofenceRequestIntent = getTransitionPendingIntent(); // Send a request to add the current geofences locationClient.addGeofences(gfList, geofenceRequestIntent, new OnAddGeofencesResultListener() { @Override public void onAddGeofencesResult(int statusCode, String[] geofenceRequestIds) { // if successful, persist the new Fence if (LocationStatusCodes.SUCCESS == statusCode) { FenceMgr.getDefault().add(fenceList); listener.onAddGeofencesResult(statusCode, geofenceRequestIds); Toast.makeText(parentActivity, "Added " + fenceList.size() + " new geofences.", Toast.LENGTH_SHORT).show(); } else { // If adding the geofences failed /* * Report errors here. * You can log the error using Log.e() or update * the UI. 
*/ } // Turn off the in progress flag inProgress = false; } }); } public void removeAllGeofences(final OnRemoveGeofencesResultListener listener) { if (!FenceMgr.getDefault().getFences().isEmpty()) { removeGeofences(FenceMgr.getDefault().getFences(), listener); } } public void removeGeofences(final List<Fence> fenceList, final OnRemoveGeofencesResultListener listener) { List<String> gfList = new ArrayList<String>(); for (Fence f : fenceList) { gfList.add(f.getId()); } locationClient.removeGeofences(gfList, new OnRemoveGeofencesResultListener() { @Override public void onRemoveGeofencesByRequestIdsResult(int statusCode, String[] geofenceRequestIds) { // if successful, persist the change if (LocationStatusCodes.SUCCESS == statusCode) { FenceMgr.getDefault().delete(fenceList); listener.onRemoveGeofencesByRequestIdsResult(statusCode, geofenceRequestIds); Toast.makeText(parentActivity, "Removed " + geofenceRequestIds.length + " geofences.", Toast.LENGTH_SHORT).show(); } else { // If adding the geofences failed /* * Report errors here. * You can log the error using Log.e() or update * the UI. */ } // Turn off the in progress flag inProgress = false; } @Override public void onRemoveGeofencesByPendingIntentResult(int arg0, PendingIntent arg1) { // TODO Auto-generated method stub } }); } public void addGeofence(final Fence fence) { // get pending intent for geofence transitions geofenceRequestIntent = getTransitionPendingIntent(); // create new Geofence from Fence and add to play services Geofence gf = fence.asGeofence(); List<Geofence> gfList = new ArrayList<Geofence>(); gfList.add(gf); // Send a request to add the current geofences locationClient.addGeofences(gfList, geofenceRequestIntent, new OnAddGeofencesResultListener() { @Override public void onAddGeofencesResult(int statusCode, String[] geofenceRequestIds) { // if successful, persist the new Fence if (LocationStatusCodes.SUCCESS == statusCode) { FenceMgr.getDefault().add(fence); Toast.makeText(parentActivity, "Added new geofence named " + fence.getName(), Toast.LENGTH_SHORT).show(); } else { // If adding the geofences failed /* * Report errors here. * You can log the error using Log.e() or update * the UI. */ } // Turn off the in progress flag inProgress = false; } }); } public void removeGeofence(final Fence fence, final OnDeleteFenceListener listener) { List<String> gfList = new ArrayList<String>(); gfList.add(fence.getId()); locationClient.removeGeofences(gfList, new OnRemoveGeofencesResultListener() { @Override public void onRemoveGeofencesByRequestIdsResult(int statusCode, String[] geofenceRequestIds) { // if successful, persist the change if (LocationStatusCodes.SUCCESS == statusCode) { FenceMgr.getDefault().delete(fence); listener.onDeleteFence(fence.getId()); Toast.makeText(parentActivity, "Removed geofence named " + fence.getName(), Toast.LENGTH_SHORT).show(); } else { // If adding the geofences failed /* * Report errors here. * You can log the error using Log.e() or update * the UI. */ } // Turn off the in progress flag inProgress = false; } @Override public void onRemoveGeofencesByPendingIntentResult(int arg0, PendingIntent arg1) { // TODO Auto-generated method stub } }); } }
Added error messages when location services is turned off
src/com/example/icampgeofence/LocationMgr.java
Added error messages when location services is turned off
<ide><path>rc/com/example/icampgeofence/LocationMgr.java <ide> FenceMgr.getDefault().add(fence); <ide> Toast.makeText(parentActivity, "Added new geofence named " + fence.getName(), Toast.LENGTH_SHORT).show(); <ide> } <add> else if (LocationStatusCodes.GEOFENCE_NOT_AVAILABLE == statusCode) { <add> // If adding the geofences failed <add> Toast.makeText(parentActivity, "Error: Location Access turned off in Settings", Toast.LENGTH_SHORT).show(); <add> } <add> else if (LocationStatusCodes.GEOFENCE_TOO_MANY_GEOFENCES == statusCode) { <add> // If adding the geofences failed <add> Toast.makeText(parentActivity, "Error: Geofence limit exceeded" + fence.getName() + ", Code:" + statusCode, Toast.LENGTH_SHORT).show(); <add> } <ide> else { <ide> // If adding the geofences failed <del> /* <del> * Report errors here. <del> * You can log the error using Log.e() or update <del> * the UI. <del> */ <add> Toast.makeText(parentActivity, "Error: Geofence not added", Toast.LENGTH_SHORT).show(); <ide> } <ide> // Turn off the in progress flag <ide> inProgress = false; <ide> listener.onDeleteFence(fence.getId()); <ide> Toast.makeText(parentActivity, "Removed geofence named " + fence.getName(), Toast.LENGTH_SHORT).show(); <ide> } <add> else if (LocationStatusCodes.GEOFENCE_NOT_AVAILABLE == statusCode) { <add> // If adding the geofences failed <add> Toast.makeText(parentActivity, "Error: Location Access turned off in Settings", Toast.LENGTH_SHORT).show(); <add> } <ide> else { <ide> // If adding the geofences failed <del> /* <del> * Report errors here. <del> * You can log the error using Log.e() or update <del> * the UI. <del> */ <add> Toast.makeText(parentActivity, "Error: Geofence not removed", Toast.LENGTH_SHORT).show(); <ide> } <ide> // Turn off the in progress flag <ide> inProgress = false;
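The diff above replaces the placeholder "Report errors here" comments with per-status-code Toast messages in both the add and remove callbacks. As a minimal illustrative sketch only (not part of the repository), the same branching could be gathered into a single helper; the class name GeofenceStatusToast and its show() method are hypothetical, while the status codes and message strings are taken directly from the diff.

import android.content.Context;
import android.widget.Toast;
import com.google.android.gms.location.LocationStatusCodes;

class GeofenceStatusToast {
    // Maps the geofence result codes used in the commit to the user-facing
    // messages it introduces, and shows them as a short Toast.
    static void show(Context ctx, int statusCode, String successMessage) {
        final String msg;
        if (LocationStatusCodes.SUCCESS == statusCode) {
            msg = successMessage; // e.g. "Added new geofence named ..."
        } else if (LocationStatusCodes.GEOFENCE_NOT_AVAILABLE == statusCode) {
            msg = "Error: Location Access turned off in Settings";
        } else if (LocationStatusCodes.GEOFENCE_TOO_MANY_GEOFENCES == statusCode) {
            msg = "Error: Geofence limit exceeded, Code:" + statusCode;
        } else {
            msg = "Error: Geofence not added";
        }
        Toast.makeText(ctx, msg, Toast.LENGTH_SHORT).show();
    }
}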
JavaScript
apache-2.0
ecbb1d2367cdd7a42f4271197c3a321bd7e3c2ef
0
uplne/vivamusica2014
db.program.insert({ "datenum" : 21, "datemonth" : "jún", "datemonth_en": "June", "datetime" : "20:30<br />Entrance free", "place" : "Hlavné námestie", "place_en": "Main Square", "title" : "Sen noci svätojánskej", "title_en": "A Midsummer Night’s Dream", "intro" : "Otvárací koncert 10. ročníka Viva Musica! festivalu a Kultúrneho leta a Hradných slávností Bratislava 2014", "intro_en": "Opening concert in the 10th annual Viva Musica! Festival and Bratislava Cultural Summer and Castle Festival 2014", "text" : "Viva Musica! festival v roku 2014 oslavuje okrúhle 10. narodeniny a svojim návštevníkom opäť ponúkne niekoľko exkluzívnych hudobných zážitkov. V rámci otváracieho koncertu uvedieme v spolupráci s medzinárodným festivalom Letné shakespearovské slávnosti a Kultúrne leto a hradné slávnosti Bratislava 2014 Shakespearovu romantickú komédiu <em>Sen noci svätojánskej</em> s rovnomennou scénickou hudbou nemeckého hudobného skladateľa Felixa Mendelssohna-Bartholdyho (1809-1847). Shakespearov text ožije v podaní Sabiny Laurinovej, Oldřicha Víznera a Csongora Kassaia v sprievode Mendelssohnovej hudby interpretovanej Slovenskou filharmóniou pod vedením Leoša Svárovského. Viva Shakespeare!<br /><br /><b>Realizačný tím:</b><br /><br />Preklad: Martin Hilský, Ľubomír Feldek<br />Dramaturgia a réžia: Róbert Mankovecký<br />Producent za LSS: Janka Zednikovičová<br /><br /><b>Účinkujú:</b><br /><br />Oberon – <b>Oldřich Vízner</b><br />Titania – <b>Sabina Laurinová</b><br />Puk – <b>Csongor Kassai</b><br />Klbko – <b>Peter Kadlečík</b><br />Väzba – <b>Jakub Rybárik</b><br /><br /><b>Petronela Drobná</b> – soprán<br /><b>Katarína Kubovičová-Sroková</b> – alt<br /><b>Ženský spevácky zbor</b><br /><b>Jozef Chabroň</b> – zbormajster<br /><br /><b>Slovenská filharmónia</b><br /><b>Leoš Svárovský</b> – dirigent", "text_en": "In 2014 the Viva Musica! Festival celebrates its 10th birthday and again offers its visitors some exquisite musical delights. In collaboration with the international Summer Shakespeare Festival, Bratislava Cultural Summer and Castle Festival 2014, our opening concert presents Shakespeare’s romantic comedy <em>A Midsummer Night’s Dream</em> with the identically-named music by the German composer Felix Mendelssohn-Bartholdy (1809-1847). Shakespeare’s text is vividly rendered by Sabina Laurinová, Oldřich Vízner and Csongor Kassai, accompanied by Mendelssohn’s music performed by the Slovak Philharmonic and members of the Slovak Philharmonic Choir conducted by Leoš Svárovský. 
Viva Shakespeare!<br /><br /><b>Production team:</b><br /><br />Translators: Martin Hilský, Ľubomír Feldek<br />Dramaturge and director: Róbert Mankovecký<br />Producer for SSF: Janka Zednikovičová<br /><br /><b>Performers:</b><br /><br />Oberon – <b>Oldřich Vízner</b><br />Titania – <b>Sabina Laurinová</b><br />Puck – <b>Csongor Kassai</b><br />Bottom – <b>Peter Kadlečík</b><br />Quince – <b>Jakub Rybárik</b><br /><br /><b>Petronela Drobná</b> – soprano<br /><b>Katarína Kubovičová-Sroková</b> – alto<br /><b>Members of the Slovak Philharmonic Choir</b><br /><b>Jozef Chabroň</b> – choirmaster<br /><br /><b>Slovak Philharmonic</b><br /><b>Leoš Svárovský</b> – conductor", "img" : "sen.jpg", "path" : "sen-noci-svatojanskej", "tickets" : "", "price" : "Vstup voľný", "price_en": "Entrance free" }) db.program.insert({ "datenum" : 22, "datemonth" : "jún", "datemonth_en" : "June", "datetime" : "17:00", "place" : "Nádvorie Primaciálneho paláca", "place_en": "Primatial Palace Forecourt", "title" : "Čarovná flauta", "title_en": "The Magic Flute", "intro" : "Detský deň v rámci Viva Musica! festivalu na obľúbenom Hlavnom námestí a na Nádvorí Primaciálneho paláca prinesie okrem iného aj netradičné spracovanie opernej klasiky pre deti.", "intro_en": "The famous Mozart opera as you’ve never known it!", "text" : "Mozartova opera Čarovná flauta rozpráva príbeh princa Tamina, ktorý bojuje o priazeň pôvabnej princeznej Paminy, dcéry Kráľovnej noci, a vtáčkara Papagena túžiaceho po krásnej Papagene. Silou lásky a pravdy hrdinovia prekonávajú nástrahy Kráľovnej noci, jej troch dvorných dám a sluhu Monostata. Mozartova opera v podaní pražského Národného bábkového divadla je určená predovšetkým deťom a ich rodičom a prinesie okrem stretnutia s operou a hudbou rakúskeho génia aj nevšedný zážitok z vizuálneho stvárnenia diela v podaní bábok so živými hercami.<br /><br /><b>Národné bábkové divadlo Praha:</b><br />Tamino – <b>Ivan Čermák</b> <br />Papageno – <b>Michal Džula</b><br />Sarastro – <b>Roman Havelka</b><br />Papagena – <b>Taťana Zemanová</b><br />Pamina – <b>Linda Bláhová Lahnerová</b><br />Kráľovná noci / Dáma – <b>Vlastimila Žaludová</b><br /><br /><b>Martin Vanek</b> – rozprávač <br /><br /><b>Realizačný tím:</b><br />Réžia: <b>Karel Brožek</b><br />Vedúci techniky: <b>Karel Vacek</b><br />Technika: <b>David Hron, Kateřina Hronová</b><br />Producent: <b>Petr Vodička</b>", "text_en": "Mozart’s opera <em>The Magic Flute</em> tells the story of prince Tamino, who is fighting for the favour of the charming princess Pamina, daughter of the Queen of the Night, and the bird-catcher Papageno who is full of desire for the beautiful Papagena. By the power of love and truth the heroes overcome the snares of the Queen of the Night, her three court ladies and her servant Monostato. Mozart’s opera, as rendered by the Prague National Marionette Theatre, is designed principally for children and their parents, and apart from the encounter with the work and music of the Austrian genius, offers an unusual treat in the visual presentation of the work as rendered by puppets with live actors. 
Martin Vanek will be the accompanist during the performance.<br /><br /><b>Prague National Marionette Theatre:</b><br />Tamino – <b>Ivan Čermák</b> <br />Papageno – <b>Michal Džula</b><br />Sarastro – <b>Roman Havelka</b><br />Papagena – <b>Taťana Zemanová</b><br />Pamina – <b>Linda Bláhová Lahnerová</b><br />Queen of the Night / Court lady – <b>Vlastimila Žaludová</b><br /><br /><b>Martin Vanek</b> – narrator<br /><br /><b>Production team:</b><br />Director: <b>Karel Brožek</b><br />Head of technics: <b>Karel Vacek</b><br />Technics: <b>David Hron, Kateřina</b><br />Producer: <b>Petr Vodička</b>", "img" : "babkova.jpg", "path" : "carovna-flauta", "tickets" : "http://www.ticketportal.sk/event.aspx?id=19643&idpartner=57", "price" : "" }) db.program.insert({ "datenum" : 22, "datemonth" : "jún", "datemonth_en" : "June", "datetime" : "20:00<br />Entrance free", "place" : "Hlavné námestie", "place_en": "Main Square", "title" : "Virtuoso", "title_en" : "Virtuoso", "intro" : "Predstavte si obrovský orchester zložený z talentovaných detí z celého Slovenska – detí, ktoré by sa za normálnych okolností možno nikdy nestretli...", "intro_en": "Imagine an enormous orchestra composed of talented children from the whole of Slovakia – children you would never meet under normal circumstances...", "text" : "Predstavte si obrovský orchester zložený z talentovaných detí z celého Slovenska – detí, ktoré by sa za normálnych okoSpojila ich však láska k hudbe, hodiny cvičenia na hudobnom nástroji a možno aj v kútiku duše schovaný sen stať sa niekým, kto sa zapíše do hudobných dejín. Unikátny hudobný projekt VIRTUOSO túto predstavu premieňa na realitu prostredníctvom zapojenia detských a mládežníckych talentov zo základných umeleckých škôl z celého Slovenska do jedného veľkého národného orchestra. Šéfdirigentom orchestra je Igor Dohovič, rodák z Prešova, pod ktorého vedením už národný mládežnícky orchester úspešne absolvoval niekoľko koncertov po celom Slovensku. V orchestri platí pravidlo: „Jeden za všetkých – všetci za lásku ku klasickej hudbe!“ A tá je prudko nákazlivá aj pre všetkých ostatných.<br /><br /><b>Program:</b><br /><br /><b>Henry Purcell</b> (1659-1695): Rondeau zo suity Abdelazer<br /><b>Antonio Vivaldi</b> (1678-1741): Štyri ročné obdobia – Jar<br /><b>Antonio Vivaldi</b>: Sinfonia h mol „Al santo sepolcro“, RV 169<br /><b>Wolfgang Amadeus Mozart</b> (1756-1791): Non più andrai, ária Figara z opery Figarova svadba<br /><b>Joseph Haydn</b> (1732-1809): Detská symfónia (1. a 3. časť)<br /><b>Peter Martin:</b> Sovetto<br /><b>Dmitrij Šostakovič</b> (1906-1975): Valčík zo Suity č. 2<br /><b>Hubert Giraud</b> (1920): Sous le ciel de Paris<br /><b>Lucio Dalla</b> (1943-2012): Caruso <br /><b>Karl William Pamp Jenkins</b> (1944): Palladio<br /><b>Ennio Morricone</b> (1928): Vtedy na západe <br /><b>Klaus Badelt</b> (1967): Piráti z Karibiku <br /><b>Astor Piazzolla</b> (1921-1992): Tango <br /><br /><b>Národný mládežnícky orchester Virtuoso</b><br /><b>Filip Tůma</b> – barytón<br /><b>Igor Dohovič</b> – dirigent", "text_en": "But what all of them had in common was a love of music, hours of practice on a musical instrument, and maybe in some corner of their minds a dream of becoming someone whose name would be recorded in musical history. The unique musical project VIRTUOSO makes this idea a reality by bringing together talented children and youth from the primary art schools of all Slovakia into one big national orchestra. 
The chief conductor is Igor Dohovič, a native of Prešov, under whose direction the orchestra has already successfully completed a number of concerts throughout Slovakia. In this orchestra the rule applies: <em>“One for all, and all for the love of classical music!”</em> And that is highly infectious for everyone else.<br /><br /><b>Program:</b><br /><br /><b>Henry Purcell</b> (1659-1695): Rondeau from Abdelazer suite<br /><b>Antonio Vivaldi</b> (1678-1741): The Four Seasons – Spring<br /><b>Antonio Vivaldi:</b> Sinfonia b minor „Al santo sepolcro“, RV 169<br /><b>Wolfgang Amadeus Mozart</b> (1756-1791): Non più andrai, aria of Figaro from The Marriage of Figaro<br /><b>Joseph Haydn</b> (1732-1809): Children’s Symphony (1st and 3rd mov.)<br /><b>Peter Martin:</b> Sovetto<br /><b>Dmitrij Šostakovič</b> (1906-1975): Waltz from Suite No. 2<br /><b>Hubert Giraud</b> (1920): Sous le ciel de Paris<br /><b>Lucio Dalla</b> (1943-2012): Caruso <br /><b>Karl William Pamp Jenkins</b> (1944): Palladio<br /><b>Ennio Morricone</b> (1928): Once Upon a Time in the West<br /><b>Klaus Badelt</b> (1967): Pirates of the Caribbean<br /><b>Astor Piazzolla</b> (1921-1992): Tango <br /><br /><b>National Youth Orchestra Virtuoso</b><br /><b>Filip Tůma</b> – baritone<br /><b>Igor Dohovič</b> – conductor", "img" : "virtuoso.jpg", "path" : "virtuoso", "tickets" : "", "price" : "Vstup voľný", "price_en": "Entrance free" }) db.program.insert({ "datenum" : 24, "datemonth" : "jún", "datemonth": "June", "datetime" : "20:00", "place" : "Stará tržnica", "place_en": "Old City Market Hall", "title" : "Quasars Ensemble & Dalibor Karvay", "title_en": "Quasars Ensemble & Dalibor Karvay", "intro" : "Jedinečné spojenie hviezd súčasnej klasiky a husľového virtuóza Dalibora Karvaya.", "intro_en": "A unique alliance of stars of contemporary classical music with the violin virtuoso Dalibor Karvay.", "text" : "Komorné zoskupenie Quasars Ensemble pôsobí na slovenskej hudobnej scéne od roku 2008 a počas šiestich rokov sa vyprovilovalo na medzinárodne uznávaný súbor súčasnej klasickej hudby. Quasars Ensemble založil hudobný skladateľ, klavirista a dirigent Ivan Buffa a výnimočnosťou súboru je okrem iného aj fakt, že popri súčasnej klasickej hudbe sa v rovnakej miere venuje aj hudbe starších epoch. Súbor je pravidelným hosťom významných domácich i zahraničných hudobných festivalov, organizuje vlastné projekty v regiónoch Slovenska, má na konte šesť profilových CD albumov a je držiteľom ocenenia Krištáľové krídlo v kategórii „Hudba“ za rok 2013. Špeciálnym hosťom koncertu bude vynikajúci slovenský huslista Dalibor Karvay, ktorý v sprievode Quasars Ensemble pod taktovkou Ivana Buffu uvedie Waxmanovu verziu melódií z Bizetovej opery <em>Carmen</em> a Ravelovu virtuóznu skladbu <em>Tzigane</em>, ktoré v programe doplní <em>Septet</em> francúzskeho skladateľa s poľskými koreňmi Alexandra Tansmana a <em>Komorná hudba</em> č. 1, op. 24 nemeckého autora Paula Hindemitha.<br /><br /><b>Program:</b><br /><br /><b>Alexandre Tansman</b> (1897-1986): Septet<br /><b>Maurice Ravel</b> (1875-1937): Tzigane (arr. I. Buffa)<br /><b>Franz Waxman</b> (1906-1967): Fantázia Carmen (arr. I. Buffa)<br /><br />* * *<br /><br /><b>Alexander Moyzes</b> (1906-1984): Divertimento op. 11 (arr. I. Buffa)<br /><b>Paul Hindemith</b> (1895-1963): Komorná hudba č. 1, op. 
24<br /><br /><b>Dalibor Karvay</b> – husle<br /><br /><b>Quasars Ensemble:</b><br /><b>Andrea Bošková</b> – flauta<br /><b>Júlia Csíziková</b> – hoboj<br /><b>Martin Mosorjak</b> – klarinet<br /><b>Attila Jankó</b> – fagot<br /><b>István Siket</b> – trúbka<br /><b>András Kovalcsik</b> – lesný roh<br /><b>Diana Buffa</b> – klavír<br /><b>Tamás Schlanger</b> – bicie<br /><b>Maroš Potokár</b> – 1. husle<br /><b>Peter Mosorjak</b> – 2. husle<br /><b>Peter Zwiebel</b> – viola<br /><b>Andrej Gál</b> – violončelo<br /><b>Marián Bujňák</b> – kontrabas<br /><b>Milan Osadský</b> – akordeón<br /><br /><b>Ivan Buffa</b> – dirigent", "text_en": "The chamber formation Quasars Ensemble has been active on the Slovak music scene since 2008, and in the course of six years has established its profile as an internationally respected formation in contemporary classical music. Quasars Ensemble was founded by the composer, pianist and conductor Ivan Buffa, and one of its exceptional features is that alongside contemporary classical music it also devotes itself in equal measure to the music of earlier epochs. The ensemble is a regular guest at leading music festivals at home and abroad, organises projects of its own in the regions of Slovakia, has six profile CD albums to its credit, and is the holder of the 2013 Crystal Wing Prize in the “Music” category. The concert’s special guest will be the outstanding Slovak violinist Dalibor Karvay, who, accompanied by Quasars Ensemble, will present the Waxman version of melodies from Bizet’s opera <em>Carmen</em> and Ravel’s virtuoso work <em>Tzigane</em>. These will be complemented in the programme by <em>Septet</em>, a work by the French composer (with Polish roots) Alexander Tansman, <em>Divertimento</em> Op. 11 by the Slovak composer Alexander Moyzes, and <em>Chamber Music</em> No. 1, Op. 24 by the German composer Paul Hindemith.<br /><br /><b>Program:</b><br /><br /><b>Alexandre Tansman</b> (1897-1986): Septet<br /><b>Maurice Ravel</b> (1875-1937): Tzigane (arr. I. Buffa)<br /><b>Franz Waxman</b> (1906-1967): Carmen Fantasie (arr. I. Buffa)<br /><br />* * *<br /><br /><b>Alexander Moyzes</b> (1906-1984): Divertimento Op. 11 (arr. I. Buffa)<br /><b>Paul Hindemith</b> (1895-1963): Chamber Music No. 1, Op. 
24<br /><br /><b>Dalibor Karvay</b> – violin<br /><br /><b>Quasars Ensemble:</b><br /><br /><b>Andrea Bošková</b> – flute <br /><b>Júlia Csíziková</b> – oboe<br /><b>Martin Mosorjak</b> – clarinet<br /><b>Attila Jankó</b> – bassoon<br /><b>István Siket</b> – trumpet<br /><b>András Kovalcsik</b> – French horn<br /><b>Diana Buffa</b> – piano<br /><b>Tamás Schlanger</b> – percussions<br /><b>Maroš Potokár</b> – 1st violin<br /><b>Peter Mosorjak</b> – 2nd violin<br /><b>Peter Zwiebel</b> – viola<br /><b>Andrej Gál</b> – violoncello<br /><b>Marián Bujňák</b> – double bass <br /><b>Milan Osadský</b> – accordion<br /><br /><b>Ivan Buffa</b> – conductor", "img" : "karvay.jpg", "path" : "quasars-ensemble-dalibor-karvay", "tickets" : "http://www.ticketportal.sk/event.aspx?id=19624&idpartner=57", "price" : "" }) db.program.insert({ "datenum" : 25, "datemonth" : "jún", "datemonth_en" : "June", "datetime" : "20:00", "place" : "Stará tržnica", "place_en": "Old City Market Hall", "title" : "Xavier Sabata & Il Pomo D’Oro", "title_en" : "Xavier Sabata & Il Pomo D’Oro", "intro" : "„Tento projekt patrí k tomu najinteligentnejšiemu a najpozoruhodnejšiemu, čo sa v hudobnom svete zrodilo za posledné roky.“ (Guardian)", "intro_en": "“This project is one of the most intelligent and noteworthy that the music world has produced in recent years.” (Guardian)", "text" : "Španielsky kontratenorista Xavier Sabata vo svojom projekte Händel: Bad Guys odhaľuje svet záporných postáv v Händlových operách a presviedča nás o tom, že baroková opera nemusí byť len o vznešených antických ideáloch. Vďaka svojmu výnimočnému hlasu stvárňuje postavy bezočivých pokrytcov, zlomyseľných tyranov či dokonca obyčajných hlupákov s absolútnym nadhľadom a ľahkosťou. Xavier Sabata pochádza z Barcelony a má za sebou hosťovania na tých najprestížnejších svetových operných a koncertných pódiách. Do Bratislavy príde po prvýkrát so súborom Il pomo d’oro pod vedením huslistu a dirigenta Riccarda Minasiho, ktorý sa špecializuje na tzv. historicky poučenú interpretáciu starej hudby na dobových nástrojoch.<br /><br /><b>Program:</b><br /><br />Georg Friedrich Händel (1685-1759):<br />Sinfonia B dur, HWV 339, 1. časť<br />Vo‘ dar pace a un’alma altiera (Tamerlano)<br />Nella terra, in ciel, nell‘onda (Faramondo)<br />Concerto grosso G dur, HWV 314<br />Bel labbro formato (Ottone, re di Germania)<br />Dover, giustizia, amor (Ariodante)<br /><br />* * *<br /><br />Domerò la tua fierezza (Giulio Cesare)<br />Serenatevi, o luci belle (Teseo)<br />Se l’inganno sortisce felice (Ariodante)<br />Sonáta G dur op. 5, č. 4, HWV 399<br />Così suole a rio vicina (Faramondo)<br />Voglio stragi, e voglio morte (Teseo)<br /><br />Xavier Sabata – kontratenor<br /><br /><b>Il pomo d’oro:</b><br /><b>Alfia Bakieva</b> – husle<br /><b>Boris Begelman</b> – husle<br /><b>Ester Crazzolara</b> – husle <br /><b>Anna Fuskova</b> – husle<br /><b>Daniela Nuzzoli</b> – husle, viola<br /><b>Enrico Parizzi</b> – viola<br /><b>Federico Toffano</b> – violoncello<br /><b>Davide Nava</b> – kontrabas<br /><b>Maxim Emelyanychev</b> – cembalo <br /><br /><b>Riccardo Minasi</b> – husle, umelecké vedenie<br /><br /><a href='http://www.accioncultural.es' target='_blank'><img src='/static/images/partners/ace.jpg' /></a>", "text_en": "In his project <em>Händel: Bad Guys</em> the Spanish countertenor Xavier Sabata uncovers the world of the negative characters in Händel’s operas and persuades us of the fact that baroque opera need not only be about the sublime ideals of antiquity. 
Making use of his exceptional voice, he creates the characters of ruthless hypocrites, malignant tyrants, and pure-and-simple stupid asses, with absolute clarity and ease. Xavier Sabata comes from Barcelona and has a history of guest appearances on the world’s most prestigious opera and concert stages. He is coming to Bratislava for the first time with Il pomo d’oro ensemble, led by violinist and conductor Riccardo Minasi, who specialises in the so-called historically informed performance of early music on period instruments.<br /><br /><b>Program:</b><br /><br /><b>Georg Friedrich Händel (1685-1759):</b><br /><br />Sinfonia B flat major, HWV 339, 1st mov.<br />Vo‘ dar pace a un’alma altiera (Tamerlano)<br />Nella terra, in ciel, nell‘onda (Faramondo)<br />Concerto Grosso G major, HWV 314<br />Bel labbro formato (Ottone, re di Germania)<br />Dover, giustizia, amor (Ariodante)<br /><br />* * *<br /><br />Domerò la tua fierezza (Giulio Cesare)<br />Serenatevi, o luci belle (Teseo)<br />Se l’inganno sortisce felice (Ariodante)<br />Sonate G major Op. 5, No. 4, HWV 399<br />Così suole a rio vicina (Faramondo)<br />Voglio stragi, e voglio morte (Teseo)<br /><br /><b>Xavier Sabata</b> – countertenor<br /><br /><b>Il pomo d’oro:</b><br /><br /><b>Alfia Bakieva</b> – violin<br /><b>Boris Begelman</b> – violin<br /><b>Ester Crazzolara</b> – violin <br /><b>Anna Fuskova</b> – violin<br /><b>Daniela Nuzzoli</b> – violin, viola<br /><b>Enrico Parizzi</b> – viola<br /><b>Federico Toffano</b> – violoncello<br /><b>Davide Nava</b> – double bass<br /><b>Maxim Emelyanychev</b> – cembalo <br /><br /><b>Riccardo Minasi</b> – violin, conductor<br /><br /><a href='http://www.accioncultural.es' target='_blank'><img src='/static/images/partners/ace.jpg' /></a>", "img" : "sabata.jpg", "path" : "handel-bad-guys", "tickets" : "http://www.ticketportal.sk/event.aspx?id=19543&idpartner=57", "price" : "" }) db.program.insert({ "datenum" : 26, "datemonth" : "jún", "datemonth_en": "June", "datetime" : "20:00", "place" : "Stará tržnica", "place_en": "Old City Market Hall", "title" : "Korben Dallas Symphony", "title_en" : "Korben Dallas Symphony", "intro" : "Výnimočný objav slovenskej hudobnej scény v sprievode orchestra na Viva Musica! festivale.", "intro_en": "Underground goes classic!", "text" : "Korben Dallas minulý rok pokrstil svoj druhý album Karnevalová vrana. V éteri bodujú hity Otec, Zlatý jeleň a Beh a po rokoch v hudobnom podzemí sa Korben Dallas stáva mienkotvornou kapelou. Skupinu založili spevák a gitarista Juraj Benetin a basgitarista Lukáš Fila po rozpade skupiny Appendix, v ktorej spolu hrali trinásť rokov. Bubeníkom sa po dlhom hľadaní stal Ozo Guttler zo skupiny Tu v Dome. Kapela debutovala živým albumom Pekné cesty v roku 2011 a má za sebou okrem hrania v rámci niekoľkých hudobných festivalov aj spoločné koncerty s americkou pesničkárkou Jess Klein, spoluprácu s Ľubom Petruškom z Chiki liki tu-a či s Andrejom Šebanom. S orchestrom však Korben Dallas ešte nikdy nehral – v exkluzívnej premiére po prvýkrát na Viva Musica! festivale!<br /><br /><b>Korben Dallas</b><br /><b>Juraj Benetin</b> – spev, gitara<br /><b>Lukáš Fila</b> – basgitara<br /><b>Ozo Guttler</b> – bicie <br /><br /><b>Špeciálny hosť:</b><br /><b>Ľubo Petruška</b> – gitara (Chiki liki tu-a)<br /><br /><b>Sinfonietta Bratislava</b><br /><b>Braňo Kostka</b> – dirigent <br /><br /><b>Slavomír Solovic</b> – aranžmány", "text_en": "Last year Korben Dallas named his second album <em>Carnival Raven</em>. 
The hit tunes <em>Father, Golden Deer</em> and <em>Run</em> have won favour on the ether, and after years in the musical underground Korben Dallas has become a trend-setting band. The group was founded by singer and guitarist Juraj Benetin and bass guitarist Lukáš Fila after the break-up of Appendix, where they had played together for thirteen years. After much searching, Ozo Guttler from the Here at Home group became the drummer. The band made its debut with the live album <em>Fine Roads</em> in 2011, and apart from playing in a number of music festivals it has also performed concerts together with the American singer Jess Klein and collaborated with Ľuboš Petruška of Chiki liki tu-a and Andrej Šeban. However, up to now Korben Dallas has never played with an orchestra – here it is in an exclusive premiere, first time in the Viva Musica! Festival, under the baton of Braňo Kostka!<br /><br /><b>Korben Dallas</b><br /><b>Juraj Benetin</b> – vocals, guitar<br /><b>Lukáš Fila</b> – bassguitar<br /><b>Ozo Guttler</b> – drums<br /><br /><b>Special guest:</b><br /><b>Ľubo Petruška</b> – guitar (Chiki liki tu-a)<br /><br /><b>Sinfonietta Bratislava</b><br /><b>Braňo Kostka</b> – conductor<br /><br /><b>Slavomír Solovic</b> – arranger", "img" : "korben.jpg", "path" : "korben-dallas-symphony", "tickets" : "http://www.ticketportal.sk/event.aspx?id=19545&idpartner=57", "price" : "" }) db.program.insert({ "datenum" : 27, "datemonth" : "jún", "datemonth_en": "June", "datetime" : "22:00 <br /><br />24:00 hororová noc v Gorila.sk Urban Space", "place" : "Stará tržnica", "place_en": "Old City Market Hall", "title" : "Upír Nosferatu", "title_en": "Nosferatu", "intro" : "„Keď sa za Hutterom samé od seba zavreli dvere hradu, jeho osud bol spečatený. Mal sa stať prvou svetoznámou obeťou prvého svetoznámeho upíra a ako mu povedal Dr. Sievers: Svojmu osudu neutečiete.“ (www.kinema.sk)", "intro_en": "The legendary silent film with orchestral accompaniment.<br /><br />24:00 a night of horror in Gorila.sk Urban Space", "text" : "Každý ho pozná, ale málokto ho v súčasnosti naozaj videl. Reč je o klasickom nemeckom nemom filme Upír Nosferatu (Nosferatu, eine Symphonie des Grauens; r. F. W. Murnau, 1922), ktorý už takmer storočie desí obecenstvo na celom svete. Keď sa nemecký expresionista Friedrich Wilhelm Murnau rozhodol natočiť adaptáciu slávneho románu Brama Stokera Dracula netušil, že vytvorí nadčasové dielo, ktoré budú filmoví vedci študovať ešte dlho po jeho smrti, a ktoré položí základy filmového hororu. Mladý úradník realitnej kancelárie Hutter prichádza do Transylvánie na hrad bohatého kupca, grófa Orloka, avšak po jeho návrate už nič nie je tak ako predtým... V rámci Viva Musica! festivalu uvedieme Murnauov film s autorskou hudbou slovenského skladateľa Vladislava Šarišského, laureáta prestížnej Medzinárodnej súťaže Sergeja Prokofieva v Petrohrade a držiteľa Ceny pre mladého tvorcu udeľovanej Nadáciou Tatra banky.<br /><br /><b>Vladislav „Slnko“ Šarišský</b> – autor hudby, hudobné naštudovanie, theremin<br /><br /><b>Adam Novák</b> – 1. husle<br />J<b>án Kružliak, ml.</b> – 2. husle<br /><b>Martin Mierny</b> – viola <br /><b>Boris Bohó</b> – violončelo <br /><b>Milan Osadský</b> – akordeón<br />Štefan Bugala – tympany, perkusie<br /><br /><b>Po koncerte v Starej tržnici pokračujeme hororovou nocou v Gorila.sk Urban Space!<br /><br />Príďte sa báť po kultovom Draculovi do Gorila.sk Urban Space. 27. júna o 24:00 na Nám. SNP začína hororová noc. 
Prinesieme vám tri hororové filmy, ktoré vybrali fanúšikovia tohto žánru. Úplne vážne: návštevu odporúčame len tým, ktorí sa neboja!</b><br /><br />PROGRAM<br /><br /><b>Sinister</b> (2012, USA)<br /><b>V zajatí démonov</b> (2013, USA)<br /><b>Tucker a Dale vs. Zlo</b> (2010, USA)", "text_en": "Everyone knows of it, but at the present day few have actually seen it. We are referring to the classical German silent film <em>Nosferatu</em> (<em>Nosferatu, eine Symphonie des Grauens</em>; dir. F. W. Murnau, 1922), which for almost a century has been frightening audiences throughout the world. When the German expressionist Friedrich Wilhelm Murnau decided to film an adaptation of Bram Stoker’s famous novel <em>Dracula</em>, he had no idea he was about to create a timeless work which would lay the foundations of film horror and would be studied by scholars of film long after his death. A young clerk of the Hutter estate agency comes to Transylvania to the castle of Count Orlok, a wealthy merchant, but after his return nothing is as it was before... As part of the Viva Musica! Festival we are presenting Murnau’s film with music written by the Slovak composer Vladislav “Sun” Šarišský, laureate of the prestigious international Sergej Prokofiev Competition in St. Petersburg and holder of the Young Artist’s Prize awarded by the Tatra Bank Foundation.<br /><br /><b>Vladislav „Slnko“ Šarišský</b> – composer, theremin<br /><br /><b>Adam Novák</b> – 1st violin<br /><b>Ján Kružliak, ml.</b> – 2nd violin<br /><b>Martin Mierny</b> – viola <br /><b>Boris Bohó</b> – violoncello <br /><b>Milan Osadský</b> – accordion<br /><b>Štefan Bugala</b> – timpani, percussions<br /><br /><em>After the concert in the Old City Market Hall, we continue with the night of horror in Gorila.sk Urban Space! At 24:00, June 27, the night of horror begins at SNP Square. We are bringing you three horror films which fans of this genre have selected. Quite seriously: we recommend a visit only to those who aren’t scared!</em><br /><br /><b>Programme of the night of horror:</b><br /><br /><b>Sinister</b> (2012, USA)<br /><b>The Conjuring</b> (2013, USA)<br /><b>Tucker & Dale vs. Evil</b> (2010, USA)", "img" : "nosferatu.jpg", "path" : "upir-nosferatu", "tickets" : "http://www.ticketportal.sk/event.aspx?ID=19546&idpartner=57", "price" : "" }) db.program.insert({ "datenum" : 28, "datemonth" : "jún", "datemonth_en" : "June", "datetime" : "20:00", "place" : "Bratislavský hrad", "place_en": "Bratislava Castle", "title" : "Viva Opera!", "title_en" : "Viva Opera!", "intro" : "Svetoznáme slovenské operné hviezdy po prvýkrát spolu na jednom pódiu!", "intro_en": "World-famous opera stars together on one stage for the first time!", "text" : "Záverečný koncert 10. ročníka medzinárodného festivalu Viva Musica! bude oslavou opery. Na jednom pódiu sa po prvýkrát spolu stretnú najlepší slovenskí operní sólisti – Adriana Kučerová, Jana Kurucová, Miroslav Dvorský, Dalibor Jenis a Štefan Kocán, ktorí v sprievode Orchestra Viva Musica! pod taktovkou Martina Leginusa, súčasného hudobného riaditeľa a šéfdirigenta pražskej Štátnej opery, uvedú známe i menej známe operné lahôdky z pera takých operných majstrov, akými boli Giuseppe Verdi, Georges Bizet, Gioacchino Rossini či Giacomo Puccini. Viva Opera!<br /><br />Program:<br /><br /><b>Gioacchino Rossini</b> (1792-1868):<br />Barbier zo Sevilly, predohra<br />Largo al factotum, ária z opery Barbier zo Sevilly<br />Oh patria!... 
Di tanti palpiti, ária z opery Tancredi <br />La calunnia é un venticello, ária z opery Barbier zo Sevilly <br /><br /><b>Gaetano Donizetti</b> (1797-1848):<br />Quel guardo il cavaliere, ária z opery Don Pasquale<br /><br /><b>Giuseppe Verdi</b> (1813-1901):<br />Lunge da lei... De‘ miei bollenti spiriti, ária z opery La traviata<br />Propizio ei giunge... Vieni a me, ti benedico, duet z opery Simon Boccanegra<br />Vanne la tua meta gia vedo... Credo, in un dio crudel, ária z opery Otello <br />E lui!... desso... l’Infante!... Dio che nell’alma infondere, duet z opery Don Carlos<br /><br /><b>Arrigo Boito</b> (1842-1918):<br />Son lo spirito che nega, ária z opery Mefistofeles<br /><br /><b>Giuseppe Verdi:</b><br />Bella figlia dell'amore, kvartet z opery Rigoletto <br /><br />* * *<br /><br /><b>Georges Bizet</b> (1838-1875):<br />Carmen, predohra<br />Les tringles des sistres tintaient, ária z opery Carmen <br />Votre toast, je peux vous le rendre, ária z opery Carmen<br />La fleur que tu m'avais jetée, ária z opery Carmen <br /><br /><b>Léo Delibes</b> (1836-1891):<br />Viens, Mallika, les lianes en fleurs... Dôme épais, le jasmin, duet z opery Lakmé <br /><br /><b>Jacques-François-Fromental-Élie Halévy <b/>(1799-1862):<br />Si la rigueur et la vengeance, ária z opery Židovka<br /><br /><b>Léo Delibes:</b><br />Les filles de Cadix<br /><br />Adriana Kučerová – soprán<br /><b>Jana Kurucová</b> – mezzosoprán<br /><b>Miroslav Dvorský</b> – tenor <br /><b>Dalibor Jenis</b> – barytón<br /><b>Štefan Kocán</b> – bas<br /><br /><b>Orchester Viva Musica!</b><br /><br /><b>Martin Leginus</b> – dirigent", "text_en": " The concluding concert of the 10th annual international Viva Musica! Festival will be a celebration of opera. For the first time the finest Slovak opera soloists will meet on the same stage. Adriana Kučerová, Jana Kurucová, Miroslav Dvorský, Dalibor Jenis and Štefan Kocán, accompanied by the Orchestra Viva Musica! under the baton of Martin Leginus, musical director and chief conductor of the Prague State Opera, will perform well-known and less well-known delights of opera, composed by such masters as Giuseppe Verdi, Georges Bizet, Gioacchino Rossini, and Giacomo Puccini. Viva opera!.<br /><br /><b>Program:</b><br /><br /><b>Gioacchino Rossini</b> (1792-1868):<br />The Barber of Seville, overture<br />Largo al factotum, aria from The Barber of Seville<br />Oh patria!... Di tanti palpiti, aria from Tancredi <br />La calunnia é un venticello, aria from The Barber of Seville<br /><br /><b>Gaetano Donizetti</b> (1797-1848):<br />Quel guardo il cavaliere, aria from Don Pasquale<br /><br /><b>Giuseppe Verdi</b> (1813-1901):<br />Lunge da lei... De‘ miei bollenti spiriti, aria from La traviata<br />Propizio ei giunge... Vieni a me, ti benedico, duet from Simon Boccanegra<br />Vanne la tua meta gia vedo... Credo, in un dio crudel, aria from Otello <br />E lui!... desso... l’Infante!... Dio che nell’alma infondere, duet from Don Carlos<br /><br /><b>Arrigo Boito</b> (1842-1918):<br />Son lo spirito che nega, aria from Mefistofele<br /><br /><b>Giuseppe Verdi:</b><br />Bella figlia dell'amore, quartet from Rigoletto <br /><br />* * *<br /><br /><b>Georges Bizet</b> (1838-1875):<br />Carmen, overture<br />Les tringles des sistres tintaient, aria from Carmen <br />Votre toast, je peux vous le rendre, aria from Carmen<br />La fleur que tu m'avais jetée, aria from Carmen <br /><br /><b>Léo Delibes</b> (1836-1891):<br />Viens, Mallika, les lianes en fleurs... 
Dôme épais, le jasmin, duet from Lakmé <br /><br /><b>Jacques-François-Fromental-Élie Halévy</b> (1799-1862): <br />Si la rigueur et la vengeance, aria from La Juive<br /><br /><b>Léo Delibes:</b><br />Les filles de Cadix<br /><br /><b>Adriana Kučerová</b> – soprano<br /><b>Jana Kurucová</b> – mezzosoprano<br /><b>Miroslav Dvorský</b> – tenor <br /><b>Dalibor Jenis</b> – baritone<br /><b>Štefan Kocán</b> – bass<br /><br /><b>Viva Musica! orchestra</b><br /><b>Martin Leginus</b> – conductor", "img" : "opera.jpg", "path" : "viva-opera", "tickets" : "http://www.ticketportal.sk/event.aspx?id=19471&idpartner=57", "price" : "" })
mongosave.js
db.program.insert({ "datenum" : 21, "datemonth" : "jún", "datemonth_en": "June", "datetime" : "20:30<br />Entrance free", "place" : "Hlavné námestie", "place_en": "Main Square", "title" : "Sen noci svätojánskej", "title_en": "A Midsummer Night’s Dream", "intro" : "Otvárací koncert 10. ročníka Viva Musica! festivalu a Kultúrneho leta a Hradných slávností Bratislava 2014", "intro_en": "Opening concert in the 10th annual Viva Musica! Festival and Bratislava Cultural Summer and Castle Festival 2014", "text" : "Viva Musica! festival v roku 2014 oslavuje okrúhle 10. narodeniny a svojim návštevníkom opäť ponúkne niekoľko exkluzívnych hudobných zážitkov. V rámci otváracieho koncertu uvedieme v spolupráci s medzinárodným festivalom Letné shakespearovské slávnosti a Kultúrne leto a hradné slávnosti Bratislava 2014 Shakespearovu romantickú komédiu <em>Sen noci svätojánskej</em> s rovnomennou scénickou hudbou nemeckého hudobného skladateľa Felixa Mendelssohna-Bartholdyho (1809-1847). Shakespearov text ožije v podaní Sabiny Laurinovej, Oldřicha Víznera a Csongora Kassaia v sprievode Mendelssohnovej hudby interpretovanej Slovenskou filharmóniou pod vedením Leoša Svárovského. Viva Shakespeare!<br /><br /><b>Realizačný tím:</b><br /><br />Preklad: Martin Hilský, Ľubomír Feldek<br />Dramaturgia a réžia: Róbert Mankovecký<br />Producent za LSS: Janka Zednikovičová<br /><br /><b>Účinkujú:</b><br /><br />Oberon – Oldřich Vízner<br />Titania – Sabina Laurinová<br />Puk – Csongor Kassai<br />Klbko – Peter Kadlečík<br />Väzba – Jakub Rybárik<br /><br />Petronela Drobná – soprán<br />Katarína Kubovičová-Sroková – alt<br />Ženský spevácky zbor<br />Jozef Chabroň – zbormajster<br /><br /><b>Slovenská filharmónia</b><br />Leoš Svárovský – dirigent", "text_en": "In 2014 the Viva Musica! Festival celebrates its 10th birthday and again offers its visitors some exquisite musical delights. In collaboration with the international Summer Shakespeare Festival, Bratislava Cultural Summer and Castle Festival 2014, our opening concert presents Shakespeare’s romantic comedy <em>A Midsummer Night’s Dream</em> with the identically-named music by the German composer Felix Mendelssohn-Bartholdy (1809-1847). Shakespeare’s text is vividly rendered by Sabina Laurinová, Oldřich Vízner and Csongor Kassai, accompanied by Mendelssohn’s music performed by the Slovak Philharmonic and members of the Slovak Philharmonic Choir conducted by Leoš Svárovský. Viva Shakespeare!<br /><br /><b>Production team:</b><br /><br />Translators: Martin Hilský, Ľubomír Feldek<br />Dramaturge and director: Róbert Mankovecký<br />Producer for SSF: Janka Zednikovičová<br /><br /><b>Performers:</b><br /><br />Oberon – Oldřich Vízner<br />Titania – Sabina Laurinová<br />Puck – Csongor Kassai<br />Bottom – Peter Kadlečík<br />Quince – Jakub Rybárik<br /><br />Petronela Drobná – soprano<br />Katarína Kubovičová-Sroková – alto<br />Members of the Slovak Philharmonic Choir<br />Jozef Chabroň –choirmaster<br /><br />Slovak Philharmonic<br />Leoš Svárovský – conductor", "img" : "sen.jpg", "path" : "sen-noci-svatojanskej", "tickets" : "", "price" : "Vstup voľný", "price_en": "Entrance free" }) db.program.insert({ "datenum" : 22, "datemonth" : "jún", "datemonth_en" : "June", "datetime" : "17:00", "place" : "Nádvorie Primaciálneho paláca", "place_en": "Primatial Palace Forecourt", "title" : "Čarovná flauta", "title_en": "The Magic Flute", "intro" : "Detský deň v rámci Viva Musica! 
festivalu na obľúbenom Hlavnom námestí a na Nádvorí Primaciálneho paláca prinesie okrem iného aj netradičné spracovanie opernej klasiky pre deti.", "intro_en": "The famous Mozart opera as you’ve never known it!", "text" : "Mozartova opera Čarovná flauta rozpráva príbeh princa Tamina, ktorý bojuje o priazeň pôvabnej princeznej Paminy, dcéry Kráľovnej noci, a vtáčkara Papagena túžiaceho po krásnej Papagene. Silou lásky a pravdy hrdinovia prekonávajú nástrahy Kráľovnej noci, jej troch dvorných dám a sluhu Monostata. Mozartova opera v podaní pražského Národného bábkového divadla je určená predovšetkým deťom a ich rodičom a prinesie okrem stretnutia s operou a hudbou rakúskeho génia aj nevšedný zážitok z vizuálneho stvárnenia diela v podaní bábok so živými hercami.<br /><br /><b>Národné bábkové divadlo Praha:</b><br />Tamino – <b>Ivan Čermák</b> <br />Papageno – <b>Michal Džula</b><br />Sarastro – <b>Roman Havelka</b><br />Papagena – <b>Taťana Zemanová</b><br />Pamina – <b>Linda Bláhová Lahnerová</b><br />Kráľovná noci / Dáma – <b>Vlastimila Žaludová</b><br /><br /><b>Martin Vanek</b> – rozprávač <br /><br /><b>Realizačný tím:</b><br />Réžia: <b>Karel Brožek</b><br />Vedúci techniky: <b>Karel Vacek</b><br />Technika: <b>David Hron, Kateřina Hronová</b><br />Producent: <b>Petr Vodička</b>", "text_en": "Mozart’s opera <em>The Magic Flute</em> tells the story of prince Tamino, who is fighting for the favour of the charming princess Pamina, daughter of the Queen of the Night, and the bird-catcher Papageno who is full of desire for the beautiful Papagena. By the power of love and truth the heroes overcome the snares of the Queen of the Night, her three court ladies and her servant Monostato. Mozart’s opera, as rendered by the Prague National Marionette Theatre, is designed principally for children and their parents, and apart from the encounter with the work and music of the Austrian genius, offers an unusual treat in the visual presentation of the work as rendered by puppets with live actors. 
Martin Vanek will be the accompanist during the performance.<br /><br /><b>Prague National Marionette Theatre:</b><br />Tamino – <b>Ivan Čermák</b> <br />Papageno – <b>Michal Džula</b><br />Sarastro – <b>Roman Havelka</b><br />Papagena – <b>Taťana Zemanová</b><br />Pamina – <b>Linda Bláhová Lahnerová</b><br />Queen of the Night / Court lady – <b>Vlastimila Žaludová</b><br /><br /><b>Martin Vanek</b> – narrator<br /><br /><b>Production team:</b><br />Director: <b>Karel Brožek</b><br />Head of technics: <b>Karel Vacek</b><br />Technics: <b>David Hron, Kateřina</b><br />Producer: <b>Petr Vodička</b>", "img" : "babkova.jpg", "path" : "carovna-flauta", "tickets" : "http://www.ticketportal.sk/event.aspx?id=19643&idpartner=57", "price" : "" }) db.program.insert({ "datenum" : 22, "datemonth" : "jún", "datemonth_en" : "June", "datetime" : "20:00<br />Entrance free", "place" : "Hlavné námestie", "place_en": "Main Square", "title" : "Virtuoso", "title_en" : "Virtuoso", "intro" : "Predstavte si obrovský orchester zložený z talentovaných detí z celého Slovenska – detí, ktoré by sa za normálnych okolností možno nikdy nestretli...", "intro_en": "Imagine an enormous orchestra composed of talented children from the whole of Slovakia – children you would never meet under normal circumstances...", "text" : "Predstavte si obrovský orchester zložený z talentovaných detí z celého Slovenska – detí, ktoré by sa za normálnych okoSpojila ich však láska k hudbe, hodiny cvičenia na hudobnom nástroji a možno aj v kútiku duše schovaný sen stať sa niekým, kto sa zapíše do hudobných dejín. Unikátny hudobný projekt VIRTUOSO túto predstavu premieňa na realitu prostredníctvom zapojenia detských a mládežníckych talentov zo základných umeleckých škôl z celého Slovenska do jedného veľkého národného orchestra. Šéfdirigentom orchestra je Igor Dohovič, rodák z Prešova, pod ktorého vedením už národný mládežnícky orchester úspešne absolvoval niekoľko koncertov po celom Slovensku. V orchestri platí pravidlo: „Jeden za všetkých – všetci za lásku ku klasickej hudbe!“ A tá je prudko nákazlivá aj pre všetkých ostatných.<br /><br /><b>Program:</b><br /><br /><b>Henry Purcell</b> (1659-1695): Rondeau zo suity Abdelazer<br /><b>Antonio Vivaldi</b> (1678-1741): Štyri ročné obdobia – Jar<br /><b>Antonio Vivaldi</b>: Sinfonia h mol „Al santo sepolcro“, RV 169<br /><b>Wolfgang Amadeus Mozart</b> (1756-1791): Non più andrai, ária Figara z opery Figarova svadba<br /><b>Joseph Haydn</b> (1732-1809): Detská symfónia (1. a 3. časť)<br /><b>Peter Martin:</b> Sovetto<br /><b>Dmitrij Šostakovič</b> (1906-1975): Valčík zo Suity č. 2<br /><b>Hubert Giraud</b> (1920): Sous le ciel de Paris<br /><b>Lucio Dalla</b> (1943-2012): Caruso <br /><b>Karl William Pamp Jenkins</b> (1944): Palladio<br /><b>Ennio Morricone</b> (1928): Vtedy na západe <br /><b>Klaus Badelt</b> (1967): Piráti z Karibiku <br /><b>Astor Piazzolla</b> (1921-1992): Tango <br /><br /><b>Národný mládežnícky orchester Virtuoso</b><br /><b>Filip Tůma</b> – barytón<br /><b>Igor Dohovič</b> – dirigent", "text_en": "But what all of them had in common was a love of music, hours of practice on a musical instrument, and maybe in some corner of their minds a dream of becoming someone whose name would be recorded in musical history. The unique musical project VIRTUOSO makes this idea a reality by bringing together talented children and youth from the primary art schools of all Slovakia into one big national orchestra. 
The chief conductor is Igor Dohovič, a native of Prešov, under whose direction the orchestra has already successfully completed a number of concerts throughout Slovakia. In this orchestra the rule applies: <em>“One for all, and all for the love of classical music!”</em> And that is highly infectious for everyone else.<br /><br /><b>Program:</b><br /><br /><b>Henry Purcell</b> (1659-1695): Rondeau from Abdelazer suite<br /><b>Antonio Vivaldi</b> (1678-1741): The Four Seasons – Spring<br /><b>Antonio Vivaldi:</b> Sinfonia b minor „Al santo sepolcro“, RV 169<br /><b>Wolfgang Amadeus Mozart</b> (1756-1791): Non più andrai, aria of Figaro from The Marriage of Figaro<br /><b>Joseph Haydn</b> (1732-1809): Children’s Symphony (1st and 3rd mov.)<br /><b>Peter Martin:</b> Sovetto<br /><b>Dmitrij Šostakovič</b> (1906-1975): Waltz from Suite No. 2<br /><b>Hubert Giraud</b> (1920): Sous le ciel de Paris<br /><b>Lucio Dalla</b> (1943-2012): Caruso <br /><b>Karl William Pamp Jenkins</b> (1944): Palladio<br /><b>Ennio Morricone</b> (1928): Once Upon a Time in the West<br /><b>Klaus Badelt</b> (1967): Pirates of the Caribbean<br /><b>Astor Piazzolla</b> (1921-1992): Tango <br /><br /><b>National Youth Orchestra Virtuoso</b><br /><b>Filip Tůma</b> – baritone<br /><b>Igor Dohovič</b> – conductor", "img" : "virtuoso.jpg", "path" : "virtuoso", "tickets" : "", "price" : "Vstup voľný", "price_en": "Entrance free" }) db.program.insert({ "datenum" : 24, "datemonth" : "jún", "datemonth": "June", "datetime" : "20:00", "place" : "Stará tržnica", "place_en": "Old City Market Hall", "title" : "Quasars Ensemble & Dalibor Karvay", "title_en": "Quasars Ensemble & Dalibor Karvay", "intro" : "Jedinečné spojenie hviezd súčasnej klasiky a husľového virtuóza Dalibora Karvaya.", "intro_en": "A unique alliance of stars of contemporary classical music with the violin virtuoso Dalibor Karvay.", "text" : "Komorné zoskupenie Quasars Ensemble pôsobí na slovenskej hudobnej scéne od roku 2008 a počas šiestich rokov sa vyprovilovalo na medzinárodne uznávaný súbor súčasnej klasickej hudby. Quasars Ensemble založil hudobný skladateľ, klavirista a dirigent Ivan Buffa a výnimočnosťou súboru je okrem iného aj fakt, že popri súčasnej klasickej hudbe sa v rovnakej miere venuje aj hudbe starších epoch. Súbor je pravidelným hosťom významných domácich i zahraničných hudobných festivalov, organizuje vlastné projekty v regiónoch Slovenska, má na konte šesť profilových CD albumov a je držiteľom ocenenia Krištáľové krídlo v kategórii „Hudba“ za rok 2013. Špeciálnym hosťom koncertu bude vynikajúci slovenský huslista Dalibor Karvay, ktorý v sprievode Quasars Ensemble pod taktovkou Ivana Buffu uvedie Waxmanovu verziu melódií z Bizetovej opery <em>Carmen</em> a Ravelovu virtuóznu skladbu <em>Tzigane</em>, ktoré v programe doplní <em>Septet</em> francúzskeho skladateľa s poľskými koreňmi Alexandra Tansmana a <em>Komorná hudba</em> č. 1, op. 24 nemeckého autora Paula Hindemitha.<br /><br /><b>Program:</b><br /><br /><b>Alexandre Tansman</b> (1897-1986): Septet<br /><b>Maurice Ravel</b> (1875-1937): Tzigane (arr. I. Buffa)<br /><b>Franz Waxman</b> (1906-1967): Fantázia Carmen (arr. I. Buffa)<br /><br />* * *<br /><br /><b>Alexander Moyzes</b> (1906-1984): Divertimento op. 11 (arr. I. Buffa)<br /><b>Paul Hindemith</b> (1895-1963): Komorná hudba č. 1, op. 
24<br /><br /><b>Dalibor Karvay</b> – husle<br /><br /><b>Quasars Ensemble:</b><br /><b>Andrea Bošková</b> – flauta<br /><b>Júlia Csíziková</b> – hoboj<br /><b>Martin Mosorjak</b> – klarinet<br /><b>Attila Jankó</b> – fagot<br /><b>István Siket</b> – trúbka<br /><b>András Kovalcsik</b> – lesný roh<br /><b>Diana Buffa</b> – klavír<br /><b>Tamás Schlanger</b> – bicie<br /><b>Maroš Potokár</b> – 1. husle<br /><b>Peter Mosorjak</b> – 2. husle<br /><b>Peter Zwiebel</b> – viola<br /><b>Andrej Gál</b> – violončelo<br /><b>Marián Bujňák</b> – kontrabas<br /><b>Milan Osadský</b> – akordeón<br /><br /><b>Ivan Buffa</b> – dirigent", "text_en": "The chamber formation Quasars Ensemble has been active on the Slovak music scene since 2008, and in the course of six years has established its profile as an internationally respected formation in contemporary classical music. Quasars Ensemble was founded by the composer, pianist and conductor Ivan Buffa, and one of its exceptional features is that alongside contemporary classical music it also devotes itself in equal measure to the music of earlier epochs. The ensemble is a regular guest at leading music festivals at home and abroad, organises projects of its own in the regions of Slovakia, has six profile CD albums to its credit, and is the holder of the 2013 Crystal Wing Prize in the “Music” category. The concert’s special guest will be the outstanding Slovak violinist Dalibor Karvay, who, accompanied by Quasars Ensemble, will present the Waxman version of melodies from Bizet’s opera <em>Carmen</em> and Ravel’s virtuoso work <em>Tzigane</em>. These will be complemented in the programme by <em>Septet</em>, a work by the French composer (with Polish roots) Alexander Tansman, <em>Divertimento</em> Op. 11 by the Slovak composer Alexander Moyzes, and <em>Chamber Music</em> No. 1, Op. 24 by the German composer Paul Hindemith.<br /><br /><b>Program:</b><br /><br /><b>Alexandre Tansman</b> (1897-1986): Septet<br /><b>Maurice Ravel</b> (1875-1937): Tzigane (arr. I. Buffa)<br /><b>Franz Waxman</b> (1906-1967): Carmen Fantasie (arr. I. Buffa)<br /><br />* * *<br /><br /><b>Alexander Moyzes</b> (1906-1984): Divertimento Op. 11 (arr. I. Buffa)<br /><b>Paul Hindemith</b> (1895-1963): Chamber Music No. 1, Op. 
24<br /><br /><b>Dalibor Karvay</b> – violin<br /><br /><b>Quasars Ensemble:</b><br /><br /><b>Andrea Bošková</b> – flute <br /><b>Júlia Csíziková</b> – oboe<br /><b>Martin Mosorjak</b> – clarinet<br /><b>Attila Jankó</b> – bassoon<br /><b>István Siket</b> – trumpet<br /><b>András Kovalcsik</b> – French horn<br /><b>Diana Buffa</b> – piano<br /><b>Tamás Schlanger</b> – percussions<br /><b>Maroš Potokár</b> – 1st violin<br /><b>Peter Mosorjak</b> – 2nd violin<br /><b>Peter Zwiebel</b> – viola<br /><b>Andrej Gál</b> – violoncello<br /><b>Marián Bujňák</b> – double bass <br /><b>Milan Osadský</b> – accordion<br /><br /><b>Ivan Buffa</b> – conductor", "img" : "karvay.jpg", "path" : "quasars-ensemble-dalibor-karvay", "tickets" : "http://www.ticketportal.sk/event.aspx?id=19624&idpartner=57", "price" : "" }) db.program.insert({ "datenum" : 25, "datemonth" : "jún", "datemonth_en" : "June", "datetime" : "20:00", "place" : "Stará tržnica", "place_en": "Old City Market Hall", "title" : "Xavier Sabata & Il Pomo D’Oro", "title_en" : "Xavier Sabata & Il Pomo D’Oro", "intro" : "„Tento projekt patrí k tomu najinteligentnejšiemu a najpozoruhodnejšiemu, čo sa v hudobnom svete zrodilo za posledné roky.“ (Guardian)", "intro_en": "“This project is one of the most intelligent and noteworthy that the music world has produced in recent years.” (Guardian)", "text" : "Španielsky kontratenorista Xavier Sabata vo svojom projekte Händel: Bad Guys odhaľuje svet záporných postáv v Händlových operách a presviedča nás o tom, že baroková opera nemusí byť len o vznešených antických ideáloch. Vďaka svojmu výnimočnému hlasu stvárňuje postavy bezočivých pokrytcov, zlomyseľných tyranov či dokonca obyčajných hlupákov s absolútnym nadhľadom a ľahkosťou. Xavier Sabata pochádza z Barcelony a má za sebou hosťovania na tých najprestížnejších svetových operných a koncertných pódiách. Do Bratislavy príde po prvýkrát so súborom Il pomo d’oro pod vedením huslistu a dirigenta Riccarda Minasiho, ktorý sa špecializuje na tzv. historicky poučenú interpretáciu starej hudby na dobových nástrojoch.<br /><br /><b>Program:</b><br /><br />Georg Friedrich Händel (1685-1759):<br />Sinfonia B dur, HWV 339, 1. časť<br />Vo‘ dar pace a un’alma altiera (Tamerlano)<br />Nella terra, in ciel, nell‘onda (Faramondo)<br />Concerto grosso G dur, HWV 314<br />Bel labbro formato (Ottone, re di Germania)<br />Dover, giustizia, amor (Ariodante)<br /><br />* * *<br /><br />Domerò la tua fierezza (Giulio Cesare)<br />Serenatevi, o luci belle (Teseo)<br />Se l’inganno sortisce felice (Ariodante)<br />Sonáta G dur op. 5, č. 4, HWV 399<br />Così suole a rio vicina (Faramondo)<br />Voglio stragi, e voglio morte (Teseo)<br /><br />Xavier Sabata – kontratenor<br /><br /><b>Il pomo d’oro:</b><br /><b>Alfia Bakieva</b> – husle<br /><b>Boris Begelman</b> – husle<br /><b>Ester Crazzolara</b> – husle <br /><b>Anna Fuskova</b> – husle<br /><b>Daniela Nuzzoli</b> – husle, viola<br /><b>Enrico Parizzi</b> – viola<br /><b>Federico Toffano</b> – violoncello<br /><b>Davide Nava</b> – kontrabas<br /><b>Maxim Emelyanychev</b> – cembalo <br /><br /><b>Riccardo Minasi</b> – husle, umelecké vedenie<br /><br /><a href='http://www.accioncultural.es' target='_blank'><img src='/static/images/partners/ace.jpg' /></a>", "text_en": "In his project <em>Händel: Bad Guys</em> the Spanish countertenor Xavier Sabata uncovers the world of the negative characters in Händel’s operas and persuades us of the fact that baroque opera need not only be about the sublime ideals of antiquity. 
Making use of his exceptional voice, he creates the characters of ruthless hypocrites, malignant tyrants, and pure-and-simple stupid asses, with absolute clarity and ease. Xavier Sabata comes from Barcelona and has a history of guest appearances on the world’s most prestigious opera and concert stages. He is coming to Bratislava for the first time with Il pomo d’oro ensemble, led by violinist and conductor Riccardo Minasi, who specialises in the so-called historically informed performance of early music on period instruments.<br /><br /><b>Program:</b><br /><br /><b>Georg Friedrich Händel (1685-1759):</b><br /><br />Sinfonia B flat major, HWV 339, 1st mov.<br />Vo‘ dar pace a un’alma altiera (Tamerlano)<br />Nella terra, in ciel, nell‘onda (Faramondo)<br />Concerto Grosso G major, HWV 314<br />Bel labbro formato (Ottone, re di Germania)<br />Dover, giustizia, amor (Ariodante)<br /><br />* * *<br /><br />Domerò la tua fierezza (Giulio Cesare)<br />Serenatevi, o luci belle (Teseo)<br />Se l’inganno sortisce felice (Ariodante)<br />Sonate G major Op. 5, No. 4, HWV 399<br />Così suole a rio vicina (Faramondo)<br />Voglio stragi, e voglio morte (Teseo)<br /><br /><b>Xavier Sabata</b> – countertenor<br /><br /><b>Il pomo d’oro:</b><br /><br /><b>Alfia Bakieva</b> – violin<br /><b>Boris Begelman</b> – violin<br /><b>Ester Crazzolara</b> – violin <br /><b>Anna Fuskova</b> – violin<br /><b>Daniela Nuzzoli</b> – violin, viola<br /><b>Enrico Parizzi</b> – viola<br /><b>Federico Toffano</b> – violoncello<br /><b>Davide Nava</b> – double bass<br /><b>Maxim Emelyanychev</b> – cembalo <br /><br /><b>Riccardo Minasi</b> – violin, conductor<br /><br /><a href='http://www.accioncultural.es' target='_blank'><img src='/static/images/partners/ace.jpg' /></a>", "img" : "sabata.jpg", "path" : "handel-bad-guys", "tickets" : "http://www.ticketportal.sk/event.aspx?id=19543&idpartner=57", "price" : "" }) db.program.insert({ "datenum" : 26, "datemonth" : "jún", "datemonth_en": "June", "datetime" : "20:00", "place" : "Stará tržnica", "place_en": "Old City Market Hall", "title" : "Korben Dallas Symphony", "title_en" : "Korben Dallas Symphony", "intro" : "Výnimočný objav slovenskej hudobnej scény v sprievode orchestra na Viva Musica! festivale.", "intro_en": "Underground goes classic!", "text" : "Korben Dallas minulý rok pokrstil svoj druhý album Karnevalová vrana. V éteri bodujú hity Otec, Zlatý jeleň a Beh a po rokoch v hudobnom podzemí sa Korben Dallas stáva mienkotvornou kapelou. Skupinu založili spevák a gitarista Juraj Benetin a basgitarista Lukáš Fila po rozpade skupiny Appendix, v ktorej spolu hrali trinásť rokov. Bubeníkom sa po dlhom hľadaní stal Ozo Guttler zo skupiny Tu v Dome. Kapela debutovala živým albumom Pekné cesty v roku 2011 a má za sebou okrem hrania v rámci niekoľkých hudobných festivalov aj spoločné koncerty s americkou pesničkárkou Jess Klein, spoluprácu s Ľubom Petruškom z Chiki liki tu-a či s Andrejom Šebanom. S orchestrom však Korben Dallas ešte nikdy nehral – v exkluzívnej premiére po prvýkrát na Viva Musica! festivale!<br /><br /><b>Korben Dallas</b><br /><b>Juraj Benetin</b> – spev, gitara<br /><b>Lukáš Fila</b> – basgitara<br /><b>Ozo Guttler</b> – bicie <br /><br /><b>Špeciálny hosť:</b><br /><b>Ľubo Petruška</b> – gitara (Chiki liki tu-a)<br /><br /><b>Sinfonietta Bratislava</b><br /><b>Braňo Kostka</b> – dirigent <br /><br /><b>Slavomír Solovic</b> – aranžmány", "text_en": "Last year Korben Dallas named his second album <em>Carnival Raven</em>. 
The hit tunes <em>Father, Golden Deer</em> and <em>Run</em> have won favour on the ether, and after years in the musical underground Korben Dallas has become a trend-setting band. The group was founded by singer and guitarist Juraj Benetin and bass guitarist Lukáš Fila after the break-up of Appendix, where they had played together for thirteen years. After much searching, Ozo Guttler from the Here at Home group became the drummer. The band made its debut with the live album <em>Fine Roads</em> in 2011, and apart from playing in a number of music festivals it has also performed concerts together with the American singer Jess Klein and collaborated with Ľuboš Petruška of Chiki liki tu-a and Andrej Šeban. However, up to now Korben Dallas has never played with an orchestra – here it is in an exclusive premiere, first time in the Viva Musica! Festival, under the baton of Braňo Kostka!<br /><br /><b>Korben Dallas</b><br /><b>Juraj Benetin</b> – vocals, guitar<br /><b>Lukáš Fila</b> – bassguitar<br /><b>Ozo Guttler</b> – drums<br /><br /><b>Special guest:</b><br /><b>Ľubo Petruška</b> – guitar (Chiki liki tu-a)<br /><br /><b>Sinfonietta Bratislava</b><br /><b>Braňo Kostka</b> – conductor<br /><br /><b>Slavomír Solovic</b> – arranger", "img" : "korben.jpg", "path" : "korben-dallas-symphony", "tickets" : "http://www.ticketportal.sk/event.aspx?id=19545&idpartner=57", "price" : "" }) db.program.insert({ "datenum" : 27, "datemonth" : "jún", "datemonth_en": "June", "datetime" : "22:00 <br /><br />24:00 hororová noc v Gorila.sk Urban Space", "place" : "Stará tržnica", "place_en": "Old City Market Hall", "title" : "Upír Nosferatu", "title_en": "Nosferatu", "intro" : "„Keď sa za Hutterom samé od seba zavreli dvere hradu, jeho osud bol spečatený. Mal sa stať prvou svetoznámou obeťou prvého svetoznámeho upíra a ako mu povedal Dr. Sievers: Svojmu osudu neutečiete.“ (www.kinema.sk)", "intro_en": "The legendary silent film with orchestral accompaniment.<br /><br />24:00 a night of horror in Gorila.sk Urban Space", "text" : "Každý ho pozná, ale málokto ho v súčasnosti naozaj videl. Reč je o klasickom nemeckom nemom filme Upír Nosferatu (Nosferatu, eine Symphonie des Grauens; r. F. W. Murnau, 1922), ktorý už takmer storočie desí obecenstvo na celom svete. Keď sa nemecký expresionista Friedrich Wilhelm Murnau rozhodol natočiť adaptáciu slávneho románu Brama Stokera Dracula netušil, že vytvorí nadčasové dielo, ktoré budú filmoví vedci študovať ešte dlho po jeho smrti, a ktoré položí základy filmového hororu. Mladý úradník realitnej kancelárie Hutter prichádza do Transylvánie na hrad bohatého kupca, grófa Orloka, avšak po jeho návrate už nič nie je tak ako predtým... V rámci Viva Musica! festivalu uvedieme Murnauov film s autorskou hudbou slovenského skladateľa Vladislava Šarišského, laureáta prestížnej Medzinárodnej súťaže Sergeja Prokofieva v Petrohrade a držiteľa Ceny pre mladého tvorcu udeľovanej Nadáciou Tatra banky.<br /><br /><b>Vladislav „Slnko“ Šarišský</b> – autor hudby, hudobné naštudovanie, theremin<br /><br /><b>Adam Novák</b> – 1. husle<br />J<b>án Kružliak, ml.</b> – 2. husle<br /><b>Martin Mierny</b> – viola <br /><b>Boris Bohó</b> – violončelo <br /><b>Milan Osadský</b> – akordeón<br />Štefan Bugala – tympany, perkusie<br /><br /><b>Po koncerte v Starej tržnici pokračujeme hororovou nocou v Gorila.sk Urban Space!<br /><br />Príďte sa báť po kultovom Draculovi do Gorila.sk Urban Space. 27. júna o 24:00 na Nám. SNP začína hororová noc. 
Prinesieme vám tri hororové filmy, ktoré vybrali fanúšikovia tohto žánru. Úplne vážne: návštevu odporúčame len tým, ktorí sa neboja!</b><br /><br />PROGRAM<br /><br /><b>Sinister</b> (2012, USA)<br /><b>V zajatí démonov</b> (2013, USA)<br /><b>Tucker a Dale vs. Zlo</b> (2010, USA)", "text_en": "Everyone knows of it, but at the present day few have actually seen it. We are referring to the classical German silent film <em>Nosferatu</em> (<em>Nosferatu, eine Symphonie des Grauens</em>; dir. F. W. Murnau, 1922), which for almost a century has been frightening audiences throughout the world. When the German expressionist Friedrich Wilhelm Murnau decided to film an adaptation of Bram Stoker’s famous novel <em>Dracula</em>, he had no idea he was about to create a timeless work which would lay the foundations of film horror and would be studied by scholars of film long after his death. A young clerk of the Hutter estate agency comes to Transylvania to the castle of Count Orlok, a wealthy merchant, but after his return nothing is as it was before... As part of the Viva Musica! Festival we are presenting Murnau’s film with music written by the Slovak composer Vladislav “Sun” Šarišský, laureate of the prestigious international Sergej Prokofiev Competition in St. Petersburg and holder of the Young Artist’s Prize awarded by the Tatra Bank Foundation.<br /><br /><b>Vladislav „Slnko“ Šarišský</b> – composer, theremin<br /><br /><b>Adam Novák</b> – 1st violin<br /><b>Ján Kružliak, ml.</b> – 2nd violin<br /><b>Martin Mierny</b> – viola <br /><b>Boris Bohó</b> – violoncello <br /><b>Milan Osadský</b> – accordion<br /><b>Štefan Bugala</b> – timpani, percussions<br /><br /><em>After the concert in the Old City Market Hall, we continue with the night of horror in Gorila.sk Urban Space! At 24:00, June 27, the night of horror begins at SNP Square. We are bringing you three horror films which fans of this genre have selected. Quite seriously: we recommend a visit only to those who aren’t scared!</em><br /><br /><b>Programme of the night of horror:</b><br /><br /><b>Sinister</b> (2012, USA)<br /><b>The Conjuring</b> (2013, USA)<br /><b>Tucker & Dale vs. Evil</b> (2010, USA)", "img" : "nosferatu.jpg", "path" : "upir-nosferatu", "tickets" : "http://www.ticketportal.sk/event.aspx?ID=19546&idpartner=57", "price" : "" }) db.program.insert({ "datenum" : 28, "datemonth" : "jún", "datemonth_en" : "June", "datetime" : "20:00", "place" : "Bratislavský hrad", "place_en": "Bratislava Castle", "title" : "Viva Opera!", "title_en" : "Viva Opera!", "intro" : "Svetoznáme slovenské operné hviezdy po prvýkrát spolu na jednom pódiu!", "intro_en": "World-famous opera stars together on one stage for the first time!", "text" : "Záverečný koncert 10. ročníka medzinárodného festivalu Viva Musica! bude oslavou opery. Na jednom pódiu sa po prvýkrát spolu stretnú najlepší slovenskí operní sólisti – Adriana Kučerová, Jana Kurucová, Miroslav Dvorský, Dalibor Jenis a Štefan Kocán, ktorí v sprievode Orchestra Viva Musica! pod taktovkou Martina Leginusa, súčasného hudobného riaditeľa a šéfdirigenta pražskej Štátnej opery, uvedú známe i menej známe operné lahôdky z pera takých operných majstrov, akými boli Giuseppe Verdi, Georges Bizet, Gioacchino Rossini či Giacomo Puccini. Viva Opera!<br /><br />Program:<br /><br /><b>Gioacchino Rossini</b> (1792-1868):<br />Barbier zo Sevilly, predohra<br />Largo al factotum, ária z opery Barbier zo Sevilly<br />Oh patria!... 
Di tanti palpiti, ária z opery Tancredi <br />La calunnia é un venticello, ária z opery Barbier zo Sevilly <br /><br /><b>Gaetano Donizetti</b> (1797-1848):<br />Quel guardo il cavaliere, ária z opery Don Pasquale<br /><br /><b>Giuseppe Verdi</b> (1813-1901):<br />Lunge da lei... De‘ miei bollenti spiriti, ária z opery La traviata<br />Propizio ei giunge... Vieni a me, ti benedico, duet z opery Simon Boccanegra<br />Vanne la tua meta gia vedo... Credo, in un dio crudel, ária z opery Otello <br />E lui!... desso... l’Infante!... Dio che nell’alma infondere, duet z opery Don Carlos<br /><br /><b>Arrigo Boito</b> (1842-1918):<br />Son lo spirito che nega, ária z opery Mefistofeles<br /><br /><b>Giuseppe Verdi:</b><br />Bella figlia dell'amore, kvartet z opery Rigoletto <br /><br />* * *<br /><br /><b>Georges Bizet</b> (1838-1875):<br />Carmen, predohra<br />Les tringles des sistres tintaient, ária z opery Carmen <br />Votre toast, je peux vous le rendre, ária z opery Carmen<br />La fleur que tu m'avais jetée, ária z opery Carmen <br /><br /><b>Léo Delibes</b> (1836-1891):<br />Viens, Mallika, les lianes en fleurs... Dôme épais, le jasmin, duet z opery Lakmé <br /><br /><b>Jacques-François-Fromental-Élie Halévy <b/>(1799-1862):<br />Si la rigueur et la vengeance, ária z opery Židovka<br /><br /><b>Léo Delibes:</b><br />Les filles de Cadix<br /><br />Adriana Kučerová – soprán<br /><b>Jana Kurucová</b> – mezzosoprán<br /><b>Miroslav Dvorský</b> – tenor <br /><b>Dalibor Jenis</b> – barytón<br /><b>Štefan Kocán</b> – bas<br /><br /><b>Orchester Viva Musica!</b><br /><br /><b>Martin Leginus</b> – dirigent", "text_en": " The concluding concert of the 10th annual international Viva Musica! Festival will be a celebration of opera. For the first time the finest Slovak opera soloists will meet on the same stage. Adriana Kučerová, Jana Kurucová, Miroslav Dvorský, Dalibor Jenis and Štefan Kocán, accompanied by the Orchestra Viva Musica! under the baton of Martin Leginus, musical director and chief conductor of the Prague State Opera, will perform well-known and less well-known delights of opera, composed by such masters as Giuseppe Verdi, Georges Bizet, Gioacchino Rossini, and Giacomo Puccini. Viva opera!.<br /><br /><b>Program:</b><br /><br /><b>Gioacchino Rossini</b> (1792-1868):<br />The Barber of Seville, overture<br />Largo al factotum, aria from The Barber of Seville<br />Oh patria!... Di tanti palpiti, aria from Tancredi <br />La calunnia é un venticello, aria from The Barber of Seville<br /><br /><b>Gaetano Donizetti</b> (1797-1848):<br />Quel guardo il cavaliere, aria from Don Pasquale<br /><br /><b>Giuseppe Verdi</b> (1813-1901):<br />Lunge da lei... De‘ miei bollenti spiriti, aria from La traviata<br />Propizio ei giunge... Vieni a me, ti benedico, duet from Simon Boccanegra<br />Vanne la tua meta gia vedo... Credo, in un dio crudel, aria from Otello <br />E lui!... desso... l’Infante!... Dio che nell’alma infondere, duet from Don Carlos<br /><br /><b>Arrigo Boito</b> (1842-1918):<br />Son lo spirito che nega, aria from Mefistofele<br /><br /><b>Giuseppe Verdi:</b><br />Bella figlia dell'amore, quartet from Rigoletto <br /><br />* * *<br /><br /><b>Georges Bizet</b> (1838-1875):<br />Carmen, overture<br />Les tringles des sistres tintaient, aria from Carmen <br />Votre toast, je peux vous le rendre, aria from Carmen<br />La fleur que tu m'avais jetée, aria from Carmen <br /><br /><b>Léo Delibes</b> (1836-1891):<br />Viens, Mallika, les lianes en fleurs... 
Dôme épais, le jasmin, duet from Lakmé <br /><br /><b>Jacques-François-Fromental-Élie Halévy</b> (1799-1862): <br />Si la rigueur et la vengeance, aria from La Juive<br /><br /><b>Léo Delibes:</b><br />Les filles de Cadix<br /><br /><b>Adriana Kučerová</b> – soprano<br /><b>Jana Kurucová</b> – mezzosoprano<br /><b>Miroslav Dvorský</b> – tenor <br /><b>Dalibor Jenis</b> – baritone<br /><b>Štefan Kocán</b> – bass<br /><br /><b>Viva Musica! orchestra</b><br /><b>Martin Leginus</b> – conductor", "img" : "opera.jpg", "path" : "viva-opera", "tickets" : "http://www.ticketportal.sk/event.aspx?id=19471&idpartner=57", "price" : "" })
DB update
mongosave.js
DB update
<ide><path>ongosave.js <ide> "title_en": "A Midsummer Night’s Dream", <ide> "intro" : "Otvárací koncert 10. ročníka Viva Musica! festivalu a Kultúrneho leta a Hradných slávností Bratislava 2014", <ide> "intro_en": "Opening concert in the 10th annual Viva Musica! Festival and Bratislava Cultural Summer and Castle Festival 2014", <del> "text" : "Viva Musica! festival v roku 2014 oslavuje okrúhle 10. narodeniny a svojim návštevníkom opäť ponúkne niekoľko exkluzívnych hudobných zážitkov. V rámci otváracieho koncertu uvedieme v spolupráci s medzinárodným festivalom Letné shakespearovské slávnosti a Kultúrne leto a hradné slávnosti Bratislava 2014 Shakespearovu romantickú komédiu <em>Sen noci svätojánskej</em> s rovnomennou scénickou hudbou nemeckého hudobného skladateľa Felixa Mendelssohna-Bartholdyho (1809-1847). Shakespearov text ožije v podaní Sabiny Laurinovej, Oldřicha Víznera a Csongora Kassaia v sprievode Mendelssohnovej hudby interpretovanej Slovenskou filharmóniou pod vedením Leoša Svárovského. Viva Shakespeare!<br /><br /><b>Realizačný tím:</b><br /><br />Preklad: Martin Hilský, Ľubomír Feldek<br />Dramaturgia a réžia: Róbert Mankovecký<br />Producent za LSS: Janka Zednikovičová<br /><br /><b>Účinkujú:</b><br /><br />Oberon – Oldřich Vízner<br />Titania – Sabina Laurinová<br />Puk – Csongor Kassai<br />Klbko – Peter Kadlečík<br />Väzba – Jakub Rybárik<br /><br />Petronela Drobná – soprán<br />Katarína Kubovičová-Sroková – alt<br />Ženský spevácky zbor<br />Jozef Chabroň – zbormajster<br /><br /><b>Slovenská filharmónia</b><br />Leoš Svárovský – dirigent", <del> "text_en": "In 2014 the Viva Musica! Festival celebrates its 10th birthday and again offers its visitors some exquisite musical delights. In collaboration with the international Summer Shakespeare Festival, Bratislava Cultural Summer and Castle Festival 2014, our opening concert presents Shakespeare’s romantic comedy <em>A Midsummer Night’s Dream</em> with the identically-named music by the German composer Felix Mendelssohn-Bartholdy (1809-1847). Shakespeare’s text is vividly rendered by Sabina Laurinová, Oldřich Vízner and Csongor Kassai, accompanied by Mendelssohn’s music performed by the Slovak Philharmonic and members of the Slovak Philharmonic Choir conducted by Leoš Svárovský. Viva Shakespeare!<br /><br /><b>Production team:</b><br /><br />Translators: Martin Hilský, Ľubomír Feldek<br />Dramaturge and director: Róbert Mankovecký<br />Producer for SSF: Janka Zednikovičová<br /><br /><b>Performers:</b><br /><br />Oberon – Oldřich Vízner<br />Titania – Sabina Laurinová<br />Puck – Csongor Kassai<br />Bottom – Peter Kadlečík<br />Quince – Jakub Rybárik<br /><br />Petronela Drobná – soprano<br />Katarína Kubovičová-Sroková – alto<br />Members of the Slovak Philharmonic Choir<br />Jozef Chabroň –choirmaster<br /><br />Slovak Philharmonic<br />Leoš Svárovský – conductor", <add> "text" : "Viva Musica! festival v roku 2014 oslavuje okrúhle 10. narodeniny a svojim návštevníkom opäť ponúkne niekoľko exkluzívnych hudobných zážitkov. V rámci otváracieho koncertu uvedieme v spolupráci s medzinárodným festivalom Letné shakespearovské slávnosti a Kultúrne leto a hradné slávnosti Bratislava 2014 Shakespearovu romantickú komédiu <em>Sen noci svätojánskej</em> s rovnomennou scénickou hudbou nemeckého hudobného skladateľa Felixa Mendelssohna-Bartholdyho (1809-1847). 
Shakespearov text ožije v podaní Sabiny Laurinovej, Oldřicha Víznera a Csongora Kassaia v sprievode Mendelssohnovej hudby interpretovanej Slovenskou filharmóniou pod vedením Leoša Svárovského. Viva Shakespeare!<br /><br /><b>Realizačný tím:</b><br /><br />Preklad: Martin Hilský, Ľubomír Feldek<br />Dramaturgia a réžia: Róbert Mankovecký<br />Producent za LSS: Janka Zednikovičová<br /><br /><b>Účinkujú:</b><br /><br />Oberon – <b>Oldřich Vízner</b><br />Titania – <b>Sabina Laurinová</b><br />Puk – <b>Csongor Kassai</b><br />Klbko – <b>Peter Kadlečík</b><br />Väzba – <b>Jakub Rybárik</b><br /><br /><b>Petronela Drobná</b> – soprán<br /><b>Katarína Kubovičová-Sroková</b> – alt<br /><b>Ženský spevácky zbor</b><br /><b>Jozef Chabroň</b> – zbormajster<br /><br /><b>Slovenská filharmónia</b><br /><b>Leoš Svárovský</b> – dirigent", <add> "text_en": "In 2014 the Viva Musica! Festival celebrates its 10th birthday and again offers its visitors some exquisite musical delights. In collaboration with the international Summer Shakespeare Festival, Bratislava Cultural Summer and Castle Festival 2014, our opening concert presents Shakespeare’s romantic comedy <em>A Midsummer Night’s Dream</em> with the identically-named music by the German composer Felix Mendelssohn-Bartholdy (1809-1847). Shakespeare’s text is vividly rendered by Sabina Laurinová, Oldřich Vízner and Csongor Kassai, accompanied by Mendelssohn’s music performed by the Slovak Philharmonic and members of the Slovak Philharmonic Choir conducted by Leoš Svárovský. Viva Shakespeare!<br /><br /><b>Production team:</b><br /><br />Translators: Martin Hilský, Ľubomír Feldek<br />Dramaturge and director: Róbert Mankovecký<br />Producer for SSF: Janka Zednikovičová<br /><br /><b>Performers:</b><br /><br />Oberon – <b>Oldřich Vízner</b><br />Titania – <b>Sabina Laurinová</b><br />Puck – <b>Csongor Kassai</b><br />Bottom – <b>Peter Kadlečík</b><br />Quince – <b>Jakub Rybárik</b><br /><br /><b>Petronela Drobná</b> – soprano<br /><b>Katarína Kubovičová-Sroková</b> – alto<br /><b>Members of the Slovak Philharmonic Choir</b><br /><b>Jozef Chabroň</b> – choirmaster<br /><br /><b>Slovak Philharmonic</b><br /><b>Leoš Svárovský</b> – conductor", <ide> "img" : "sen.jpg", <ide> "path" : "sen-noci-svatojanskej", <ide> "tickets" : "",
Java
mit
0c75a95e67e6ea5662ebc6a6472895f0b1ee699c
0
TechCavern/WaveTact
package com.techcavern.wavetact.utils; import com.google.common.io.Files; import com.techcavern.wavetact.annot.ConCMD; import com.techcavern.wavetact.objects.NetProperty; import org.jooq.Record; import org.pircbotx.Colors; import com.techcavern.wavetact.annot.IRCCMD; import com.techcavern.wavetact.objects.ConsoleCommand; import com.techcavern.wavetact.objects.IRCCommand; import org.flywaydb.core.Flyway; import org.jooq.DSLContext; import org.jooq.SQLDialect; import org.jooq.impl.DSL; import org.pircbotx.PircBotX; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.lang.reflect.Field; import java.sql.Connection; import java.sql.DriverManager; import java.util.Scanner; import java.util.Set; import java.util.concurrent.TimeUnit; import static com.techcavern.wavetactdb.Tables.BANS; public class LoadUtils { public static void initiateDatabaseConnection() throws Exception { Flyway flyway = new Flyway(); flyway.setDataSource("jdbc:sqlite:./db.sqlite", null, null); flyway.migrate(); System.err.println("Getting connection..."); Class.forName("org.sqlite.JDBC"); Connection conn = DriverManager.getConnection("jdbc:sqlite:./db.sqlite"); System.err.println("Creating DSLContext..."); Registry.WaveTactDB = DSL.using(conn, SQLDialect.SQLITE); Registry.wundergroundapikey = DatabaseUtils.getConfig("wundergroundapikey"); Registry.wolframalphaapikey = DatabaseUtils.getConfig("wolframalphaapikey"); Registry.wordnikapikey = DatabaseUtils.getConfig("wordnikapikey"); Registry.googleapikey = DatabaseUtils.getConfig("googleapikey"); } public static void registerIRCCommands() { Set<Class<?>> classes = Registry.wavetactreflection.getTypesAnnotatedWith(IRCCMD.class); for (Class<?> clss : classes) { try { Registry.IRCCommands.add(((IRCCommand) clss.newInstance())); } catch (Exception e) { e.printStackTrace(); } } } public static void registerConsoleCommands() { Set<Class<?>> classes = Registry.wavetactreflection.getTypesAnnotatedWith(ConCMD.class); for (Class<?> clss : classes) { try { Registry.ConsoleCommands.add(((ConsoleCommand) clss.newInstance())); } catch (Exception e) { e.printStackTrace(); } } } public static void registerAttacks() { Registry.Attacks.add("sends a 53 inch monitor flying at $*"); Registry.Attacks.add("shoots a rocket at $*"); Registry.Attacks.add("punches $* right in the crotch"); Registry.Attacks.add("packs $* up and ships it off to another galaxy"); Registry.Attacks.add("eats $* up for breakfast"); Registry.Attacks.add("sends a flying desk at $*"); Registry.Attacks.add("swallows $* whole"); Registry.Attacks.add("ties $* up and feeds it to a shark"); Registry.Attacks.add("runs over $* with a car"); Registry.Attacks.add("throws a racket at $*"); Registry.Attacks.add("gobbles up $*"); Registry.Attacks.add("throws a 2000 pound object at $*"); Registry.Attacks.add("starts throwing punches at $*"); Registry.Attacks.add("sends a flying dragon at $*"); Registry.Attacks.add("takes over $*'s computers and blasts porn at full volume"); Registry.Attacks.add("packs $* up and ships them off to Apple"); Registry.Attacks.add("hands $* off to Lord Voldemort"); Registry.Attacks.add("hands $* off to a pack of a wolves"); Registry.Attacks.add("hands $* off to a herd of centaurs"); Registry.Attacks.add("drops $* off to a 2000 kilometer cliff"); Registry.Attacks.add("flies $* out into the middle of nowhere"); Registry.Attacks.add("hunts $* down with a gun"); Registry.Attacks.add("slaps $* around with a large trout"); Registry.Attacks.add("throws iphones at $*"); 
Registry.Attacks.add("fires missile at $*"); Registry.Attacks.add("puts $* in a rocket and sends them off to pluto"); Registry.Attacks.add("forcefeeds $* a plate of poisoned beef"); Registry.Attacks.add("mind controls $* to marry Dolores Umbridge"); Registry.Attacks.add("throws poorly written code at $*"); Registry.Attacks.add("throws knives at $*"); Registry.Attacks.add("throws various objects at $*"); Registry.Attacks.add("throws rocks at $*"); Registry.Attacks.add("throws grenades at $*"); Registry.Attacks.add("throws IE6 at $*"); Registry.Attacks.add("throws axes at $*"); Registry.Attacks.add("throws evil things at $*"); Registry.Attacks.add("throws netsplits at $*"); Registry.Attacks.add("throws hammers at $*"); Registry.Attacks.add("throws spears at $*"); Registry.Attacks.add("throws spikes at $*"); Registry.Attacks.add("throws $* into a burning building"); Registry.Attacks.add("throws sharp things at $*"); Registry.Attacks.add("throws moldy bread at $*"); Registry.Attacks.add("throws mojibake at $*"); Registry.Attacks.add("throws floppy disks at $*"); Registry.Attacks.add("throws nails at $*"); Registry.Attacks.add("throws burning planets at $*"); Registry.Attacks.add("throws thorns at $*"); Registry.Attacks.add("throws skulls at $*"); Registry.Attacks.add("throws a fresh, unboxed copy of Windows Me at $*"); Registry.Attacks.add("casts fire at $*"); Registry.Attacks.add("casts ice at $*"); Registry.Attacks.add("casts death at $*"); Registry.Attacks.add("casts " + Colors.BOLD + "DEATH" + Colors.BOLD + " at $*"); Registry.Attacks.add("casts poison at $*"); Registry.Attacks.add("casts stupid at $*"); Registry.Attacks.add("attacks $* with knives"); Registry.Attacks.add("attacks $* with idiots from #freenode"); Registry.Attacks.add("attacks $* with an army of trolls"); Registry.Attacks.add("attacks $* with oper abuse"); Registry.Attacks.add("attacks $* with confusingly bad english"); Registry.Attacks.add("attacks $* with Windows Me"); Registry.Attacks.add("attacks $* with Quicktime for Windows"); Registry.Attacks.add("attacks $* with ???"); Registry.Attacks.add("attacks $* with segmentation faults"); Registry.Attacks.add("attacks $* with relentless spyware"); Registry.Attacks.add("attacks $* with NSA spies"); Registry.Attacks.add("attacks $* with tracking devices"); Registry.Attacks.add("attacks $* with a botnet"); } public static void registerEightball() { Registry.Eightball.add("Hmm.. 
not today"); Registry.Eightball.add("YES!"); Registry.Eightball.add("Maybe"); Registry.Eightball.add("Nope."); Registry.Eightball.add("Sources say no."); Registry.Eightball.add("Definitely"); Registry.Eightball.add("I have my doubts"); Registry.Eightball.add("Signs say yes"); Registry.Eightball.add("Cannot predict now"); Registry.Eightball.add("It is certain"); Registry.Eightball.add("Sure"); Registry.Eightball.add("Outlook decent"); Registry.Eightball.add("Very doubtful"); Registry.Eightball.add("Perhaps now is not a good time to tell you"); Registry.Eightball.add("Concentrate and ask again"); Registry.Eightball.add("Forget about it"); Registry.Eightball.add("Don't count on it"); } public static void addDir(String s) throws IOException { try { Field field = ClassLoader.class.getDeclaredField("usr_paths"); field.setAccessible(true); String[] paths = (String[]) field.get(null); for (String path : paths) { if (s.equals(path)) { return; } } String[] tmp = new String[paths.length + 1]; System.arraycopy(paths, 0, tmp, 0, paths.length); tmp[paths.length] = s; field.set(null, tmp); System.setProperty("java.library.path", System.getProperty("java.library.path") + File.pathSeparator + s); } catch (IllegalAccessException e) { throw new IOException("Failed to get permissions to set library path"); } catch (NoSuchFieldException e) { throw new IOException("Failed to get field handle to set library path"); } } public static void initializeMessageQueue(){ for(NetProperty network:Registry.NetworkName) { class MessageQueue implements Runnable { @Override public void run() { try { TimeUnit.SECONDS.sleep(30); } catch (InterruptedException c) { } while (true) { try { if (Registry.MessageQueue.size() > 0 && network.getNetwork().equals(Registry.MessageQueue.get(0).getNetwork())) { Registry.MessageQueue.get(0).getNetwork().sendRaw().rawLine(Registry.MessageQueue.get(0).getProperty()); Registry.MessageQueue.remove(0); TimeUnit.MILLISECONDS.sleep(900); } TimeUnit.MILLISECONDS.sleep(100); } catch (Exception e) { } } } } Registry.threadPool.execute(new MessageQueue()); } } public static void initalizeBanQueue() { class BanQueue implements Runnable { @Override public void run() { try { TimeUnit.SECONDS.sleep(120); } catch (InterruptedException c) { // ignored } while (true) { try { for (Record banRecord : DatabaseUtils.getBans()) { try { if (System.currentTimeMillis() >= banRecord.getValue(BANS.TIME) + banRecord.getValue(BANS.INIT)) { PircBotX networkObject = IRCUtils.getBotByNetworkName(banRecord.getValue(BANS.NETWORK)); IRCUtils.setMode(IRCUtils.getChannelbyName(networkObject, banRecord.getValue(BANS.CHANNEL)), networkObject, "-" + banRecord.getValue(BANS.PROPERTY), banRecord.getValue(BANS.HOSTMASK)); DatabaseUtils.removeBan(banRecord.getValue(BANS.NETWORK), banRecord.getValue(BANS.CHANNEL), banRecord.getValue(BANS.HOSTMASK), banRecord.getValue(BANS.ISMUTE)); } } catch (IllegalArgumentException | NullPointerException e) { // ignored } } TimeUnit.SECONDS.sleep(120); } catch (InterruptedException e) { e.printStackTrace(); } } } } Registry.threadPool.execute(new BanQueue()); } }
src/main/java/com/techcavern/wavetact/utils/LoadUtils.java
package com.techcavern.wavetact.utils; import com.google.common.io.Files; import com.techcavern.wavetact.annot.ConCMD; import com.techcavern.wavetact.objects.NetProperty; import org.jooq.Record; import org.pircbotx.Colors; import com.techcavern.wavetact.annot.IRCCMD; import com.techcavern.wavetact.objects.ConsoleCommand; import com.techcavern.wavetact.objects.IRCCommand; import org.flywaydb.core.Flyway; import org.jooq.DSLContext; import org.jooq.SQLDialect; import org.jooq.impl.DSL; import org.pircbotx.PircBotX; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.lang.reflect.Field; import java.sql.Connection; import java.sql.DriverManager; import java.util.Scanner; import java.util.Set; import java.util.concurrent.TimeUnit; import static com.techcavern.wavetactdb.Tables.BANS; public class LoadUtils { public static void initiateDatabaseConnection() throws Exception { Flyway flyway = new Flyway(); flyway.setDataSource("jdbc:sqlite:./db.sqlite", null, null); flyway.migrate(); System.err.println("Getting connection..."); Class.forName("org.sqlite.JDBC"); Connection conn = DriverManager.getConnection("jdbc:sqlite:./db.sqlite"); System.err.println("Creating DSLContext..."); Registry.WaveTactDB = DSL.using(conn, SQLDialect.SQLITE); Registry.wundergroundapikey = DatabaseUtils.getConfig("wundergroundapikey"); Registry.wolframalphaapikey = DatabaseUtils.getConfig("wolframalphaapikey"); Registry.wordnikapikey = DatabaseUtils.getConfig("wordnikapikey"); Registry.googleapikey = DatabaseUtils.getConfig("googleapikey"); } public static void registerIRCCommands() { Set<Class<?>> classes = Registry.wavetactreflection.getTypesAnnotatedWith(IRCCMD.class); for (Class<?> clss : classes) { try { Registry.IRCCommands.add(((IRCCommand) clss.newInstance())); } catch (Exception e) { e.printStackTrace(); } } } public static void registerConsoleCommands() { Set<Class<?>> classes = Registry.wavetactreflection.getTypesAnnotatedWith(ConCMD.class); for (Class<?> clss : classes) { try { Registry.ConsoleCommands.add(((ConsoleCommand) clss.newInstance())); } catch (Exception e) { e.printStackTrace(); } } } public static void registerAttacks() { Registry.Attacks.add("sends a 53 inch monitor flying at $*"); Registry.Attacks.add("shoots a rocket at $*"); Registry.Attacks.add("punches $* right in the crotch"); Registry.Attacks.add("packs $* up and ships it off to another galaxy"); Registry.Attacks.add("eats $* up for breakfast"); Registry.Attacks.add("sends a flying desk at $*"); Registry.Attacks.add("swallows $* whole"); Registry.Attacks.add("ties $* up and feeds it to a shark"); Registry.Attacks.add("runs over $* with a car"); Registry.Attacks.add("throws a racket at $*"); Registry.Attacks.add("gobbles up $*"); Registry.Attacks.add("throws a 2000 pound object at $*"); Registry.Attacks.add("starts throwing punches at $*"); Registry.Attacks.add("sends a flying dragon at $*"); Registry.Attacks.add("takes over $*'s computers and blasts porn at full volume"); Registry.Attacks.add("packs $* up and ships them off to Apple"); Registry.Attacks.add("hands $* off to Lord Voldemort"); Registry.Attacks.add("hands $* off to a pack of a wolves"); Registry.Attacks.add("hands $* off to a herd of centaurs"); Registry.Attacks.add("drops $* off to a 2000 kilometer cliff"); Registry.Attacks.add("flies $* out into the middle of nowhere"); Registry.Attacks.add("hunts $* down with a gun"); Registry.Attacks.add("slaps $* around with a large trout"); Registry.Attacks.add("throws iphones at $*"); 
Registry.Attacks.add("fires missile at $*"); Registry.Attacks.add("puts $* in a rocket and sends them off to pluto"); Registry.Attacks.add("forcefeeds $* a plate of poisoned beef"); Registry.Attacks.add("mind controls $* to marry Dolores Umbridge"); Registry.Attacks.add("throws poorly written code at $*"); Registry.Attacks.add("throws knives at $*"); Registry.Attacks.add("throws various objects at $*"); Registry.Attacks.add("throws rocks at $*"); Registry.Attacks.add("throws grenades at $*"); Registry.Attacks.add("throws IE6 at $*"); Registry.Attacks.add("throws axes at $*"); Registry.Attacks.add("throws evil things at $*"); Registry.Attacks.add("throws netsplits at $*"); Registry.Attacks.add("throws hammers at $*"); Registry.Attacks.add("throws spears at $*"); Registry.Attacks.add("throws spikes at $*"); Registry.Attacks.add("throws sharp things at $*"); Registry.Attacks.add("throws moldy bread at $*"); Registry.Attacks.add("throws mojibake at $*"); Registry.Attacks.add("throws floppy disks at $*"); Registry.Attacks.add("throws nails at $*"); Registry.Attacks.add("throws burning planets at $*"); Registry.Attacks.add("throws thorns at $*"); Registry.Attacks.add("throws skulls at $*"); Registry.Attacks.add("throws a fresh, unboxed copy of Windows Me at $*"); Registry.Attacks.add("casts fire at $*"); Registry.Attacks.add("casts ice at $*"); Registry.Attacks.add("casts death at $*"); Registry.Attacks.add("casts " + Colors.BOLD + "DEATH" + Colors.BOLD + " at $*"); Registry.Attacks.add("casts poison at $*"); Registry.Attacks.add("casts stupid at $*"); Registry.Attacks.add("attacks $* with knives"); Registry.Attacks.add("attacks $* with idiots from #freenode"); Registry.Attacks.add("attacks $* with an army of trolls"); Registry.Attacks.add("attacks $* with oper abuse"); Registry.Attacks.add("attacks $* with confusingly bad english"); Registry.Attacks.add("attacks $* with Windows Me"); Registry.Attacks.add("attacks $* with Quicktime for Windows"); Registry.Attacks.add("attacks $* with ???"); Registry.Attacks.add("attacks $* with segmentation faults"); Registry.Attacks.add("attacks $* with relentless spyware"); Registry.Attacks.add("attacks $* with NSA spies"); Registry.Attacks.add("attacks $* with tracking devices"); Registry.Attacks.add("attacks $* with a botnet"); } public static void registerEightball() { Registry.Eightball.add("Hmm.. 
not today"); Registry.Eightball.add("YES!"); Registry.Eightball.add("Maybe"); Registry.Eightball.add("Nope."); Registry.Eightball.add("Sources say no."); Registry.Eightball.add("Definitely"); Registry.Eightball.add("I have my doubts"); Registry.Eightball.add("Signs say yes"); Registry.Eightball.add("Cannot predict now"); Registry.Eightball.add("It is certain"); Registry.Eightball.add("Sure"); Registry.Eightball.add("Outlook decent"); Registry.Eightball.add("Very doubtful"); Registry.Eightball.add("Perhaps now is not a good time to tell you"); Registry.Eightball.add("Concentrate and ask again"); Registry.Eightball.add("Forget about it"); Registry.Eightball.add("Don't count on it"); } public static void addDir(String s) throws IOException { try { Field field = ClassLoader.class.getDeclaredField("usr_paths"); field.setAccessible(true); String[] paths = (String[]) field.get(null); for (String path : paths) { if (s.equals(path)) { return; } } String[] tmp = new String[paths.length + 1]; System.arraycopy(paths, 0, tmp, 0, paths.length); tmp[paths.length] = s; field.set(null, tmp); System.setProperty("java.library.path", System.getProperty("java.library.path") + File.pathSeparator + s); } catch (IllegalAccessException e) { throw new IOException("Failed to get permissions to set library path"); } catch (NoSuchFieldException e) { throw new IOException("Failed to get field handle to set library path"); } } public static void initializeMessageQueue(){ for(NetProperty network:Registry.NetworkName) { class MessageQueue implements Runnable { @Override public void run() { try { TimeUnit.SECONDS.sleep(30); } catch (InterruptedException c) { } while (true) { try { if (Registry.MessageQueue.size() > 0 && network.getNetwork().equals(Registry.MessageQueue.get(0).getNetwork())) { Registry.MessageQueue.get(0).getNetwork().sendRaw().rawLine(Registry.MessageQueue.get(0).getProperty()); Registry.MessageQueue.remove(0); TimeUnit.MILLISECONDS.sleep(900); } TimeUnit.MILLISECONDS.sleep(100); } catch (Exception e) { } } } } Registry.threadPool.execute(new MessageQueue()); } } public static void initalizeBanQueue() { class BanQueue implements Runnable { @Override public void run() { try { TimeUnit.SECONDS.sleep(120); } catch (InterruptedException c) { // ignored } while (true) { try { for (Record banRecord : DatabaseUtils.getBans()) { try { if (System.currentTimeMillis() >= banRecord.getValue(BANS.TIME) + banRecord.getValue(BANS.INIT)) { PircBotX networkObject = IRCUtils.getBotByNetworkName(banRecord.getValue(BANS.NETWORK)); IRCUtils.setMode(IRCUtils.getChannelbyName(networkObject, banRecord.getValue(BANS.CHANNEL)), networkObject, "-" + banRecord.getValue(BANS.PROPERTY), banRecord.getValue(BANS.HOSTMASK)); DatabaseUtils.removeBan(banRecord.getValue(BANS.NETWORK), banRecord.getValue(BANS.CHANNEL), banRecord.getValue(BANS.HOSTMASK), banRecord.getValue(BANS.ISMUTE)); } } catch (IllegalArgumentException | NullPointerException e) { // ignored } } TimeUnit.SECONDS.sleep(120); } catch (InterruptedException e) { e.printStackTrace(); } } } } Registry.threadPool.execute(new BanQueue()); } }
Adds an attack (Resolves #63)
src/main/java/com/techcavern/wavetact/utils/LoadUtils.java
Adds an attack (Resolves #63)
<ide><path>rc/main/java/com/techcavern/wavetact/utils/LoadUtils.java <ide> Registry.Attacks.add("throws hammers at $*"); <ide> Registry.Attacks.add("throws spears at $*"); <ide> Registry.Attacks.add("throws spikes at $*"); <add> Registry.Attacks.add("throws $* into a burning building"); <ide> Registry.Attacks.add("throws sharp things at $*"); <ide> Registry.Attacks.add("throws moldy bread at $*"); <ide> Registry.Attacks.add("throws mojibake at $*");
Java
apache-2.0
9ba9524bb45e527da91d074d1d9209797f292009
0
apache/syncope,ilgrosso/syncope,ilgrosso/syncope,ilgrosso/syncope,apache/syncope,apache/syncope,apache/syncope,ilgrosso/syncope
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.syncope.client.console.rest; import java.util.List; import java.util.Optional; import org.apache.syncope.common.lib.to.PagedResult; import org.apache.syncope.common.lib.to.UserRequest; import org.apache.syncope.common.lib.to.UserRequestForm; import org.apache.syncope.common.rest.api.beans.UserRequestFormQuery; import org.apache.syncope.common.rest.api.beans.UserRequestQuery; import org.apache.wicket.extensions.markup.html.repeater.util.SortParam; import org.apache.syncope.common.rest.api.service.UserRequestService; public class UserRequestRestClient extends BaseRestClient { private static final long serialVersionUID = -4785231164900813921L; public static int countUserRequests() { return getService(UserRequestService.class). list(new UserRequestQuery.Builder().page(1).size(0).build()). getTotalCount(); } public static List<UserRequest> getUserRequests(final int page, final int size, final SortParam<String> sort) { return getService(UserRequestService.class). list(new UserRequestQuery.Builder().page(page).size(size).orderBy(toOrderBy(sort)).build()). getResult(); } public static void cancelRequest(final String executionId, final String reason) { getService(UserRequestService.class).cancel(executionId, reason); } public static int countForms() { return getService(UserRequestService.class). getForms(new UserRequestFormQuery.Builder().page(1).size(0).build()). getTotalCount(); } public static List<UserRequestForm> getForms(final int page, final int size, final SortParam<String> sort) { return getService(UserRequestService.class). getForms(new UserRequestFormQuery.Builder().page(page).size(size).orderBy(toOrderBy(sort)).build()). getResult(); } public static Optional<UserRequestForm> getForm(final String userKey) { PagedResult<UserRequestForm> forms = getService(UserRequestService.class). getForms(new UserRequestFormQuery.Builder().user(userKey).page(1).size(1).build()); UserRequestForm form = forms.getResult().isEmpty() ? null : forms.getResult().get(0); return Optional.ofNullable(form); } public static UserRequestForm claimForm(final String taskKey) { return getService(UserRequestService.class).claimForm(taskKey); } public static UserRequestForm unclaimForm(final String taskKey) { return getService(UserRequestService.class).unclaimForm(taskKey); } public static void submitForm(final UserRequestForm form) { getService(UserRequestService.class).submitForm(form); } }
ext/flowable/client-console/src/main/java/org/apache/syncope/client/console/rest/UserRequestRestClient.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.syncope.client.console.rest; import java.util.List; import java.util.Optional; import org.apache.syncope.common.lib.to.PagedResult; import org.apache.syncope.common.lib.to.UserRequest; import org.apache.syncope.common.lib.to.UserRequestForm; import org.apache.syncope.common.rest.api.beans.UserRequestFormQuery; import org.apache.syncope.common.rest.api.beans.UserRequestQuery; import org.apache.wicket.extensions.markup.html.repeater.util.SortParam; import org.apache.syncope.common.rest.api.service.UserRequestService; public class UserRequestRestClient extends BaseRestClient { private static final long serialVersionUID = -4785231164900813921L; public static int countUserRequests() { return getService(UserRequestService.class). list(new UserRequestQuery.Builder().page(1).size(0).build()). getTotalCount(); } public static List<UserRequest> getUserRequests(final int page, final int size, final SortParam<String> sort) { return getService(UserRequestService.class). list(new UserRequestQuery.Builder().page(page).size(size).orderBy(toOrderBy(sort)).build()). getResult(); } public static void cancelRequest(final String executionId, final String reason) { getService(UserRequestService.class).cancel(executionId, reason); } public static int countForms() { return getService(UserRequestService.class). getForms(new UserRequestFormQuery.Builder().page(1).size(0).build()). getTotalCount(); } public static List<UserRequestForm> getForms(final int page, final int size, final SortParam<String> sort) { return getService(UserRequestService.class). getForms(new UserRequestFormQuery.Builder().page(page).size(size).orderBy(toOrderBy(sort)).build()). getResult(); } public static Optional<UserRequestForm> getForm(final String userKey) { PagedResult<UserRequestForm> forms = getService(UserRequestService.class). getForms(new UserRequestFormQuery.Builder().user(userKey).page(1).size(0).build()); UserRequestForm form = forms.getResult().isEmpty() ? null : forms.getResult().get(0); return Optional.ofNullable(form); } public static UserRequestForm claimForm(final String taskKey) { return getService(UserRequestService.class).claimForm(taskKey); } public static UserRequestForm unclaimForm(final String taskKey) { return getService(UserRequestService.class).unclaimForm(taskKey); } public static void submitForm(final UserRequestForm form) { getService(UserRequestService.class).submitForm(form); } }
fixed query on console rest client to retrieve form for a given user
ext/flowable/client-console/src/main/java/org/apache/syncope/client/console/rest/UserRequestRestClient.java
fixed query on console rest client to retrieve form for a given user
<ide><path>xt/flowable/client-console/src/main/java/org/apache/syncope/client/console/rest/UserRequestRestClient.java <ide> <ide> public static Optional<UserRequestForm> getForm(final String userKey) { <ide> PagedResult<UserRequestForm> forms = getService(UserRequestService.class). <del> getForms(new UserRequestFormQuery.Builder().user(userKey).page(1).size(0).build()); <add> getForms(new UserRequestFormQuery.Builder().user(userKey).page(1).size(1).build()); <ide> UserRequestForm form = forms.getResult().isEmpty() <ide> ? null <ide> : forms.getResult().get(0);
Java
apache-2.0
db555b7c3d151653b613dc807680b9bad0c750d8
0
java110/MicroCommunity,java110/MicroCommunity,java110/MicroCommunity,java110/MicroCommunity
package com.java110.api.listener.fee; import com.alibaba.fastjson.JSONArray; import com.alibaba.fastjson.JSONObject; import com.java110.api.listener.AbstractServiceApiDataFlowListener; import com.java110.core.annotation.Java110Listener; import com.java110.core.context.DataFlowContext; import com.java110.core.smo.fee.IFeeConfigInnerServiceSMO; import com.java110.core.smo.fee.IFeeInnerServiceSMO; import com.java110.core.smo.hardwareAdapation.ICarInoutInnerServiceSMO; import com.java110.core.smo.room.IRoomInnerServiceSMO; import com.java110.dto.FeeConfigDto; import com.java110.dto.FeeDto; import com.java110.dto.RoomDto; import com.java110.dto.hardwareAdapation.CarInoutDto; import com.java110.entity.center.AppService; import com.java110.entity.order.Orders; import com.java110.event.service.api.ServiceDataFlowEvent; import com.java110.utils.constant.BusinessTypeConstant; import com.java110.utils.constant.CommonConstant; import com.java110.utils.constant.FeeTypeConstant; import com.java110.utils.constant.ResponseConstant; import com.java110.utils.constant.ServiceCodeConstant; import com.java110.utils.exception.ListenerExecuteException; import com.java110.utils.util.Assert; import com.java110.utils.util.BeanConvertUtil; import com.java110.utils.util.DateUtil; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.http.HttpHeaders; import org.springframework.http.HttpMethod; import org.springframework.http.HttpStatus; import org.springframework.http.ResponseEntity; import java.util.Calendar; import java.util.Date; import java.util.List; import java.util.Map; /** * @ClassName PayFeeListener * @Description TODO 预交费(临时停车费)侦听 * @Author wuxw * @Date 2019/6/3 13:46 * @Version 1.0 * add by wuxw 2019/6/3 **/ @Java110Listener("payFeePreTempCarInoutListener") public class PayFeePreTempCarInoutListener extends AbstractServiceApiDataFlowListener { private static Logger logger = LoggerFactory.getLogger(PayFeePreTempCarInoutListener.class); @Autowired private IFeeInnerServiceSMO feeInnerServiceSMOImpl; @Autowired private IRoomInnerServiceSMO roomInnerServiceSMOImpl; @Autowired private ICarInoutInnerServiceSMO carInoutInnerServiceSMOImpl; @Autowired private IFeeConfigInnerServiceSMO feeConfigInnerServiceSMOImpl; @Override public String getServiceCode() { return ServiceCodeConstant.SERVICE_CODE_PAY_FEE_PRE_TEMP_CAR_INOUT; } @Override public HttpMethod getHttpMethod() { return HttpMethod.POST; } @Override public void soService(ServiceDataFlowEvent event) { logger.debug("ServiceDataFlowEvent : {}", event); DataFlowContext dataFlowContext = event.getDataFlowContext(); AppService service = event.getAppService(); String paramIn = dataFlowContext.getReqData(); //校验数据 validate(paramIn); JSONObject paramObj = JSONObject.parseObject(paramIn); HttpHeaders header = new HttpHeaders(); dataFlowContext.getRequestCurrentHeaders().put(CommonConstant.HTTP_ORDER_TYPE_CD, "D"); JSONArray businesses = new JSONArray(); paramObj.put("cycles", 1); //添加单元信息 businesses.add(addFeeDetail(paramObj, dataFlowContext)); businesses.add(modifyFee(paramObj, dataFlowContext)); businesses.add(modifyCarInout(paramObj, dataFlowContext)); JSONObject paramInObj = super.restToCenterProtocol(businesses, dataFlowContext.getRequestCurrentHeaders()); //将 rest header 信息传递到下层服务中去 super.freshHttpHeader(header, dataFlowContext.getRequestCurrentHeaders()); ResponseEntity<String> responseEntity = this.callService(dataFlowContext, service.getServiceCode(), paramInObj); if 
(responseEntity.getStatusCode() != HttpStatus.OK) { dataFlowContext.setResponseEntity(responseEntity); return; } JSONObject paramOut = JSONObject.parseObject(responseEntity.getBody()); paramOut.put("receivableAmount", paramObj.getString("receivableAmount")); responseEntity = new ResponseEntity<>(paramOut.toJSONString(), HttpStatus.OK); dataFlowContext.setResponseEntity(responseEntity); } private JSONObject modifyCarInout(JSONObject reqJson, DataFlowContext context) { FeeDto feeDto = (FeeDto) reqJson.get("feeInfo"); CarInoutDto tempCarInoutDto = new CarInoutDto(); tempCarInoutDto.setCommunityId(reqJson.getString("communityId")); tempCarInoutDto.setInoutId(feeDto.getPayerObjId()); List<CarInoutDto> carInoutDtos = carInoutInnerServiceSMOImpl.queryCarInouts(tempCarInoutDto); Assert.listOnlyOne(carInoutDtos, "根据费用信息反差车辆进场记录未查到 或查到多条"); CarInoutDto carInoutDto = carInoutDtos.get(0); JSONObject business = JSONObject.parseObject("{\"datas\":{}}"); business.put(CommonConstant.HTTP_BUSINESS_TYPE_CD, BusinessTypeConstant.BUSINESS_TYPE_UPDATE_CAR_INOUT); business.put(CommonConstant.HTTP_SEQ, DEFAULT_SEQ); business.put(CommonConstant.HTTP_INVOKE_MODEL, CommonConstant.HTTP_INVOKE_MODEL_S); JSONObject businessCarInout = new JSONObject(); businessCarInout.putAll(BeanConvertUtil.beanCovertMap(carInoutDto)); businessCarInout.put("state", "100400"); //计算 应收金额 business.getJSONObject(CommonConstant.HTTP_BUSINESS_DATAS).put("businessCarInout", businessCarInout); return business; } /** * 刷入order信息 * * @param orders 订单信息 * @param headers 头部信息 */ protected void freshOrderProtocol(JSONObject orders, Map<String, String> headers) { super.freshOrderProtocol(orders, headers); orders.put("orderProcess", Orders.ORDER_PROCESS_ORDER_PRE_SUBMIT); } /** * 添加费用明细信息 * * @param paramInJson 接口调用放传入入参 * @param dataFlowContext 数据上下文 * @return 订单服务能够接受的报文 */ private JSONObject addFeeDetail(JSONObject paramInJson, DataFlowContext dataFlowContext) { JSONObject business = JSONObject.parseObject("{\"datas\":{}}"); business.put(CommonConstant.HTTP_BUSINESS_TYPE_CD, BusinessTypeConstant.BUSINESS_TYPE_SAVE_FEE_DETAIL); business.put(CommonConstant.HTTP_SEQ, DEFAULT_SEQ); business.put(CommonConstant.HTTP_INVOKE_MODEL, CommonConstant.HTTP_INVOKE_MODEL_S); JSONObject businessFeeDetail = new JSONObject(); businessFeeDetail.putAll(paramInJson); businessFeeDetail.put("detailId", "-1"); businessFeeDetail.put("primeRate", "1.00"); //计算 应收金额 FeeDto feeDto = new FeeDto(); feeDto.setFeeId(paramInJson.getString("feeId")); feeDto.setCommunityId(paramInJson.getString("communityId")); List<FeeDto> feeDtos = feeInnerServiceSMOImpl.queryFees(feeDto); if (feeDtos == null || feeDtos.size() != 1) { throw new ListenerExecuteException(ResponseConstant.RESULT_CODE_ERROR, "查询费用信息失败,未查到数据或查到多条数据"); } feeDto = feeDtos.get(0); paramInJson.put("feeInfo", feeDto); FeeConfigDto feeConfigDto = new FeeConfigDto(); feeConfigDto.setFeeTypeCd(feeDto.getFeeTypeCd()); feeConfigDto.setCommunityId(feeDto.getCommunityId()); List<FeeConfigDto> feeConfigDtos = feeConfigInnerServiceSMOImpl.queryFeeConfigs(feeConfigDto); if (feeConfigDtos == null || feeConfigDtos.size() != 1) { throw new ListenerExecuteException(ResponseConstant.RESULT_CODE_ERROR, "未查到费用配置信息,查询多条数据"); } feeConfigDto = feeConfigDtos.get(0); Date nowTime = new Date(); long diff = nowTime.getTime() - feeDto.getStartTime().getTime(); long nd = 1000 * 24 * 60 * 60;// 一天的毫秒数 long nh = 1000 * 60 * 60;// 一小时的毫秒数 long nm = 1000 * 60;// 一分钟的毫秒数 double day = 0; double hour = 0; double min = 0; day = diff / nd;// 计算差多少天 hour = 
diff % nd / nh + day * 24;// 计算差多少小时 min = diff % nd % nh / nm + day * 24 * 60;// 计算差多少分钟 double money = 0.00; double newHour = hour; if (min > 0) { //一小时超过 newHour += 1; } if (newHour <= 2) { money = Double.parseDouble(feeConfigDto.getAdditionalAmount()); } else { double lastHour = newHour - 2; money = lastHour * Double.parseDouble(feeConfigDto.getSquarePrice()) + Double.parseDouble(feeConfigDto.getAdditionalAmount()); } double receivableAmount = money; businessFeeDetail.put("receivableAmount", receivableAmount); business.getJSONObject(CommonConstant.HTTP_BUSINESS_DATAS).put("businessFeeDetail", businessFeeDetail); paramInJson.put("receivableAmount", receivableAmount); return business; } /** * 修改费用信息 * * @param paramInJson 接口调用放传入入参 * @param dataFlowContext 数据上下文 * @return 订单服务能够接受的报文 */ private JSONObject modifyFee(JSONObject paramInJson, DataFlowContext dataFlowContext) { JSONObject business = JSONObject.parseObject("{\"datas\":{}}"); business.put(CommonConstant.HTTP_BUSINESS_TYPE_CD, BusinessTypeConstant.BUSINESS_TYPE_UPDATE_FEE_INFO); business.put(CommonConstant.HTTP_SEQ, DEFAULT_SEQ + 1); business.put(CommonConstant.HTTP_INVOKE_MODEL, CommonConstant.HTTP_INVOKE_MODEL_S); JSONObject businessFee = new JSONObject(); FeeDto feeInfo = (FeeDto) paramInJson.get("feeInfo"); Map feeMap = BeanConvertUtil.beanCovertMap(feeInfo); feeMap.put("startTime", DateUtil.getFormatTimeString(feeInfo.getStartTime(), DateUtil.DATE_FORMATE_STRING_A)); feeMap.put("endTime", DateUtil.getFormatTimeString(new Date(), DateUtil.DATE_FORMATE_STRING_A)); feeMap.put("total", paramInJson.getString("receivableAmount")); businessFee.putAll(feeMap); business.getJSONObject(CommonConstant.HTTP_BUSINESS_DATAS).put("businessFee", businessFee); return business; } /** * 数据校验 * * @param paramIn "communityId": "7020181217000001", * "memberId": "3456789", * "memberTypeCd": "390001200001" */ private void validate(String paramIn) { Assert.jsonObjectHaveKey(paramIn, "communityId", "请求报文中未包含communityId节点"); Assert.jsonObjectHaveKey(paramIn, "receivedAmount", "请求报文中未包含receivedAmount节点"); Assert.jsonObjectHaveKey(paramIn, "feeId", "请求报文中未包含feeId节点"); JSONObject paramInObj = JSONObject.parseObject(paramIn); Assert.hasLength(paramInObj.getString("communityId"), "小区ID不能为空"); Assert.hasLength(paramInObj.getString("receivedAmount"), "实收金额不能为空"); Assert.hasLength(paramInObj.getString("feeId"), "费用ID不能为空"); } @Override public int getOrder() { return DEFAULT_ORDER; } public IFeeInnerServiceSMO getFeeInnerServiceSMOImpl() { return feeInnerServiceSMOImpl; } public void setFeeInnerServiceSMOImpl(IFeeInnerServiceSMO feeInnerServiceSMOImpl) { this.feeInnerServiceSMOImpl = feeInnerServiceSMOImpl; } public IFeeConfigInnerServiceSMO getFeeConfigInnerServiceSMOImpl() { return feeConfigInnerServiceSMOImpl; } public void setFeeConfigInnerServiceSMOImpl(IFeeConfigInnerServiceSMO feeConfigInnerServiceSMOImpl) { this.feeConfigInnerServiceSMOImpl = feeConfigInnerServiceSMOImpl; } public IRoomInnerServiceSMO getRoomInnerServiceSMOImpl() { return roomInnerServiceSMOImpl; } public void setRoomInnerServiceSMOImpl(IRoomInnerServiceSMO roomInnerServiceSMOImpl) { this.roomInnerServiceSMOImpl = roomInnerServiceSMOImpl; } }
Api/src/main/java/com/java110/api/listener/fee/PayFeePreTempCarInoutListener.java
package com.java110.api.listener.fee; import com.alibaba.fastjson.JSONArray; import com.alibaba.fastjson.JSONObject; import com.java110.api.listener.AbstractServiceApiDataFlowListener; import com.java110.core.annotation.Java110Listener; import com.java110.core.context.DataFlowContext; import com.java110.core.smo.fee.IFeeConfigInnerServiceSMO; import com.java110.core.smo.fee.IFeeInnerServiceSMO; import com.java110.core.smo.hardwareAdapation.ICarInoutInnerServiceSMO; import com.java110.core.smo.room.IRoomInnerServiceSMO; import com.java110.dto.FeeConfigDto; import com.java110.dto.FeeDto; import com.java110.dto.RoomDto; import com.java110.dto.hardwareAdapation.CarInoutDto; import com.java110.entity.center.AppService; import com.java110.entity.order.Orders; import com.java110.event.service.api.ServiceDataFlowEvent; import com.java110.utils.constant.BusinessTypeConstant; import com.java110.utils.constant.CommonConstant; import com.java110.utils.constant.FeeTypeConstant; import com.java110.utils.constant.ResponseConstant; import com.java110.utils.constant.ServiceCodeConstant; import com.java110.utils.exception.ListenerExecuteException; import com.java110.utils.util.Assert; import com.java110.utils.util.BeanConvertUtil; import com.java110.utils.util.DateUtil; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.http.HttpHeaders; import org.springframework.http.HttpMethod; import org.springframework.http.HttpStatus; import org.springframework.http.ResponseEntity; import java.util.Calendar; import java.util.Date; import java.util.List; import java.util.Map; /** * @ClassName PayFeeListener * @Description TODO 预交费(临时停车费)侦听 * @Author wuxw * @Date 2019/6/3 13:46 * @Version 1.0 * add by wuxw 2019/6/3 **/ @Java110Listener("payFeePreTempCarInoutListener") public class PayFeePreTempCarInoutListener extends AbstractServiceApiDataFlowListener { private static Logger logger = LoggerFactory.getLogger(PayFeePreTempCarInoutListener.class); @Autowired private IFeeInnerServiceSMO feeInnerServiceSMOImpl; @Autowired private IRoomInnerServiceSMO roomInnerServiceSMOImpl; @Autowired private ICarInoutInnerServiceSMO carInoutInnerServiceSMOImpl; @Autowired private IFeeConfigInnerServiceSMO feeConfigInnerServiceSMOImpl; @Override public String getServiceCode() { return ServiceCodeConstant.SERVICE_CODE_PAY_FEE_PRE_TEMP_CAR_INOUT; } @Override public HttpMethod getHttpMethod() { return HttpMethod.POST; } @Override public void soService(ServiceDataFlowEvent event) { logger.debug("ServiceDataFlowEvent : {}", event); DataFlowContext dataFlowContext = event.getDataFlowContext(); AppService service = event.getAppService(); String paramIn = dataFlowContext.getReqData(); //校验数据 validate(paramIn); JSONObject paramObj = JSONObject.parseObject(paramIn); HttpHeaders header = new HttpHeaders(); dataFlowContext.getRequestCurrentHeaders().put(CommonConstant.HTTP_ORDER_TYPE_CD, "D"); JSONArray businesses = new JSONArray(); //添加单元信息 businesses.add(addFeeDetail(paramObj, dataFlowContext)); businesses.add(modifyFee(paramObj, dataFlowContext)); businesses.add(modifyCarInout(paramObj, dataFlowContext)); JSONObject paramInObj = super.restToCenterProtocol(businesses, dataFlowContext.getRequestCurrentHeaders()); //将 rest header 信息传递到下层服务中去 super.freshHttpHeader(header, dataFlowContext.getRequestCurrentHeaders()); ResponseEntity<String> responseEntity = this.callService(dataFlowContext, service.getServiceCode(), paramInObj); if 
(responseEntity.getStatusCode() != HttpStatus.OK) { dataFlowContext.setResponseEntity(responseEntity); return; } JSONObject paramOut = JSONObject.parseObject(responseEntity.getBody()); paramOut.put("receivableAmount", paramObj.getString("receivableAmount")); responseEntity = new ResponseEntity<>(paramOut.toJSONString(), HttpStatus.OK); dataFlowContext.setResponseEntity(responseEntity); } private JSONObject modifyCarInout(JSONObject reqJson, DataFlowContext context) { FeeDto feeDto = (FeeDto) reqJson.get("feeInfo"); CarInoutDto tempCarInoutDto = new CarInoutDto(); tempCarInoutDto.setCommunityId(reqJson.getString("communityId")); tempCarInoutDto.setInoutId(feeDto.getPayerObjId()); List<CarInoutDto> carInoutDtos = carInoutInnerServiceSMOImpl.queryCarInouts(tempCarInoutDto); Assert.listOnlyOne(carInoutDtos, "根据费用信息反差车辆进场记录未查到 或查到多条"); CarInoutDto carInoutDto = carInoutDtos.get(0); JSONObject business = JSONObject.parseObject("{\"datas\":{}}"); business.put(CommonConstant.HTTP_BUSINESS_TYPE_CD, BusinessTypeConstant.BUSINESS_TYPE_UPDATE_CAR_INOUT); business.put(CommonConstant.HTTP_SEQ, DEFAULT_SEQ); business.put(CommonConstant.HTTP_INVOKE_MODEL, CommonConstant.HTTP_INVOKE_MODEL_S); JSONObject businessCarInout = new JSONObject(); businessCarInout.putAll(BeanConvertUtil.beanCovertMap(carInoutDto)); businessCarInout.put("state", "100400"); //计算 应收金额 business.getJSONObject(CommonConstant.HTTP_BUSINESS_DATAS).put("businessCarInout", businessCarInout); return business; } /** * 刷入order信息 * * @param orders 订单信息 * @param headers 头部信息 */ protected void freshOrderProtocol(JSONObject orders, Map<String, String> headers) { super.freshOrderProtocol(orders, headers); orders.put("orderProcess", Orders.ORDER_PROCESS_ORDER_PRE_SUBMIT); } /** * 添加费用明细信息 * * @param paramInJson 接口调用放传入入参 * @param dataFlowContext 数据上下文 * @return 订单服务能够接受的报文 */ private JSONObject addFeeDetail(JSONObject paramInJson, DataFlowContext dataFlowContext) { JSONObject business = JSONObject.parseObject("{\"datas\":{}}"); business.put(CommonConstant.HTTP_BUSINESS_TYPE_CD, BusinessTypeConstant.BUSINESS_TYPE_SAVE_FEE_DETAIL); business.put(CommonConstant.HTTP_SEQ, DEFAULT_SEQ); business.put(CommonConstant.HTTP_INVOKE_MODEL, CommonConstant.HTTP_INVOKE_MODEL_S); JSONObject businessFeeDetail = new JSONObject(); businessFeeDetail.putAll(paramInJson); businessFeeDetail.put("detailId", "-1"); businessFeeDetail.put("primeRate", "1.00"); //计算 应收金额 FeeDto feeDto = new FeeDto(); feeDto.setFeeId(paramInJson.getString("feeId")); feeDto.setCommunityId(paramInJson.getString("communityId")); List<FeeDto> feeDtos = feeInnerServiceSMOImpl.queryFees(feeDto); if (feeDtos == null || feeDtos.size() != 1) { throw new ListenerExecuteException(ResponseConstant.RESULT_CODE_ERROR, "查询费用信息失败,未查到数据或查到多条数据"); } feeDto = feeDtos.get(0); paramInJson.put("feeInfo", feeDto); FeeConfigDto feeConfigDto = new FeeConfigDto(); feeConfigDto.setFeeTypeCd(feeDto.getFeeTypeCd()); feeConfigDto.setCommunityId(feeDto.getCommunityId()); List<FeeConfigDto> feeConfigDtos = feeConfigInnerServiceSMOImpl.queryFeeConfigs(feeConfigDto); if (feeConfigDtos == null || feeConfigDtos.size() != 1) { throw new ListenerExecuteException(ResponseConstant.RESULT_CODE_ERROR, "未查到费用配置信息,查询多条数据"); } feeConfigDto = feeConfigDtos.get(0); Date nowTime = new Date(); long diff = nowTime.getTime() - feeDto.getStartTime().getTime(); long nd = 1000 * 24 * 60 * 60;// 一天的毫秒数 long nh = 1000 * 60 * 60;// 一小时的毫秒数 long nm = 1000 * 60;// 一分钟的毫秒数 double day = 0; double hour = 0; double min = 0; day = diff / nd;// 计算差多少天 hour = 
diff % nd / nh + day * 24;// 计算差多少小时 min = diff % nd % nh / nm + day * 24 * 60;// 计算差多少分钟 double money = 0.00; double newHour = hour; if (min > 0) { //一小时超过 newHour += 1; } if (newHour <= 2) { money = Double.parseDouble(feeConfigDto.getAdditionalAmount()); } else { double lastHour = newHour - 2; money = lastHour * Double.parseDouble(feeConfigDto.getSquarePrice()) + Double.parseDouble(feeConfigDto.getAdditionalAmount()); } double receivableAmount = money; businessFeeDetail.put("receivableAmount", receivableAmount); business.getJSONObject(CommonConstant.HTTP_BUSINESS_DATAS).put("businessFeeDetail", businessFeeDetail); paramInJson.put("receivableAmount", receivableAmount); return business; } /** * 修改费用信息 * * @param paramInJson 接口调用放传入入参 * @param dataFlowContext 数据上下文 * @return 订单服务能够接受的报文 */ private JSONObject modifyFee(JSONObject paramInJson, DataFlowContext dataFlowContext) { JSONObject business = JSONObject.parseObject("{\"datas\":{}}"); business.put(CommonConstant.HTTP_BUSINESS_TYPE_CD, BusinessTypeConstant.BUSINESS_TYPE_UPDATE_FEE_INFO); business.put(CommonConstant.HTTP_SEQ, DEFAULT_SEQ + 1); business.put(CommonConstant.HTTP_INVOKE_MODEL, CommonConstant.HTTP_INVOKE_MODEL_S); JSONObject businessFee = new JSONObject(); FeeDto feeInfo = (FeeDto) paramInJson.get("feeInfo"); Map feeMap = BeanConvertUtil.beanCovertMap(feeInfo); feeMap.put("startTime", DateUtil.getFormatTimeString(feeInfo.getStartTime(), DateUtil.DATE_FORMATE_STRING_A)); feeMap.put("endTime", DateUtil.getFormatTimeString(new Date(), DateUtil.DATE_FORMATE_STRING_A)); feeMap.put("total", paramInJson.getString("receivableAmount")); businessFee.putAll(feeMap); business.getJSONObject(CommonConstant.HTTP_BUSINESS_DATAS).put("businessFee", businessFee); return business; } /** * 数据校验 * * @param paramIn "communityId": "7020181217000001", * "memberId": "3456789", * "memberTypeCd": "390001200001" */ private void validate(String paramIn) { Assert.jsonObjectHaveKey(paramIn, "communityId", "请求报文中未包含communityId节点"); Assert.jsonObjectHaveKey(paramIn, "receivedAmount", "请求报文中未包含receivedAmount节点"); Assert.jsonObjectHaveKey(paramIn, "feeId", "请求报文中未包含feeId节点"); JSONObject paramInObj = JSONObject.parseObject(paramIn); Assert.hasLength(paramInObj.getString("communityId"), "小区ID不能为空"); Assert.hasLength(paramInObj.getString("receivedAmount"), "实收金额不能为空"); Assert.hasLength(paramInObj.getString("feeId"), "费用ID不能为空"); } @Override public int getOrder() { return DEFAULT_ORDER; } public IFeeInnerServiceSMO getFeeInnerServiceSMOImpl() { return feeInnerServiceSMOImpl; } public void setFeeInnerServiceSMOImpl(IFeeInnerServiceSMO feeInnerServiceSMOImpl) { this.feeInnerServiceSMOImpl = feeInnerServiceSMOImpl; } public IFeeConfigInnerServiceSMO getFeeConfigInnerServiceSMOImpl() { return feeConfigInnerServiceSMOImpl; } public void setFeeConfigInnerServiceSMOImpl(IFeeConfigInnerServiceSMO feeConfigInnerServiceSMOImpl) { this.feeConfigInnerServiceSMOImpl = feeConfigInnerServiceSMOImpl; } public IRoomInnerServiceSMO getRoomInnerServiceSMOImpl() { return roomInnerServiceSMOImpl; } public void setRoomInnerServiceSMOImpl(IRoomInnerServiceSMO roomInnerServiceSMOImpl) { this.roomInnerServiceSMOImpl = roomInnerServiceSMOImpl; } }
Add cycles, hard-coded to 1 period
Api/src/main/java/com/java110/api/listener/fee/PayFeePreTempCarInoutListener.java
Add cycles, hard-coded to 1 period
<ide><path>pi/src/main/java/com/java110/api/listener/fee/PayFeePreTempCarInoutListener.java <ide> HttpHeaders header = new HttpHeaders(); <ide> dataFlowContext.getRequestCurrentHeaders().put(CommonConstant.HTTP_ORDER_TYPE_CD, "D"); <ide> JSONArray businesses = new JSONArray(); <del> <add> paramObj.put("cycles", 1); <ide> //添加单元信息 <ide> businesses.add(addFeeDetail(paramObj, dataFlowContext)); <ide> businesses.add(modifyFee(paramObj, dataFlowContext));
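The flattened listing above makes the tiered parking-fee rule in addFeeDetail hard to follow, so here is a minimal sketch of the calculation it performs: the flat additionalAmount covers roughly the first two hours, and every further started hour is billed at squarePrice. The method name, parameter names, and the ceiling-division rounding are illustrative assumptions and do not appear in the repository.

// Illustrative sketch only -- approximates the tiered rule read from addFeeDetail above.
// computeReceivable, elapsedMillis, additionalAmount and squarePrice are hypothetical names.
static double computeReceivable(long elapsedMillis, double additionalAmount, double squarePrice) {
    long hourMillis = 60L * 60L * 1000L;
    long startedHours = (elapsedMillis + hourMillis - 1) / hourMillis; // round up to whole hours
    if (startedHours <= 2) {
        return additionalAmount; // the first two hours are covered by the flat amount
    }
    return additionalAmount + (startedHours - 2) * squarePrice; // extra hours at the hourly rate
}

Note that the listener itself parses additionalAmount and squarePrice from FeeConfigDto strings with Double.parseDouble and adds an extra hour whenever any minutes have elapsed; the sketch only approximates that rounding behaviour.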
Java
apache-2.0
ababd55cdcb6ee2592f8218fc0b74664b38152a5
0
genedelisa/rockymusic
package com.rockhoppertech.music.fx.cmn; /* * #%L * rockymusic-fx * %% * Copyright (C) 1996 - 2013 Rockhopper Technologies * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import javafx.application.Application; import javafx.beans.value.ChangeListener; import javafx.beans.value.ObservableValue; import javafx.geometry.Insets; import javafx.scene.Scene; import javafx.scene.SceneBuilder; import javafx.scene.control.Button; import javafx.scene.control.ButtonBuilder; import javafx.scene.control.ComboBox; import javafx.scene.control.ScrollPane; import javafx.scene.control.TextArea; import javafx.scene.control.TextAreaBuilder; import javafx.scene.control.TextField; import javafx.scene.control.TextFieldBuilder; import javafx.scene.layout.AnchorPane; import javafx.scene.layout.AnchorPaneBuilder; import javafx.scene.layout.BorderPane; import javafx.scene.layout.BorderPaneBuilder; import javafx.scene.layout.HBox; import javafx.scene.layout.HBoxBuilder; import javafx.scene.layout.Pane; import javafx.scene.layout.VBox; import javafx.scene.layout.VBoxBuilder; import javafx.scene.paint.Color; import javafx.stage.Screen; import javafx.stage.Stage; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.rockhoppertech.music.fx.cmn.model.StaffModel; import com.rockhoppertech.music.midi.js.MIDITrack; import com.rockhoppertech.music.midi.js.MIDITrackBuilder; /** * @author <a href="http://genedelisa.com/">Gene De Lisa</a> * */ public class NotationApp extends Application { private static final Logger logger = LoggerFactory .getLogger(NotationApp.class); Stage stage; Scene scene; Pane root; private NotationController controller; private StaffModel staffModel; // private NotationView view; // private NotationCanvas view; private StaffRegion view; // private StaffControl view; // private static ObservableList<MIDINote> tableDataList; public static void main(String[] args) throws Exception { launch(args); } @Override public void start(Stage stage) throws Exception { this.stage = stage; this.staffModel = new StaffModel(); MIDITrack track = MIDITrackBuilder .create() .noteString( "E5 F G Ab G# A B C C6 D Eb F# G A B C7 B4 Bf4 A4 Af4") .durations(1, 1.5, .5, .75, .25, .25) .sequential() .build(); System.out.println(track); this.staffModel.setTrack(track); // this.view = new NotationCanvas(this.staffModel); this.view = new StaffRegion(this.staffModel); this.controller = new NotationController(staffModel, view); this.view.drawShapes(); this.configureScene(); this.configureStage(); logger.debug("started"); } private void configureStage() { stage.setTitle("Music Notation"); // fullScreen(); stage.setScene(this.scene); controller.setStage(this.stage); stage.show(); } private void fullScreen() { // make it full screen stage.setX(0); stage.setY(0); stage.setWidth(Screen.getPrimary().getVisualBounds().getWidth()); stage.setHeight(Screen.getPrimary().getVisualBounds().getHeight()); } private void configureScene() { // TextField text = new TextField(); // text.setId("noteStringText"); // text.setEditable(true); // 
text.setPromptText("Enter a note string"); // controller.setTextField(text); TextArea textArea = new TextArea(); textArea.setId("noteStringText"); textArea.setEditable(true); textArea.setPromptText("Enter a note string"); textArea.setWrapText(true); // textArea.setText(MIDITrack //.getPitchesAsString(this.staffModel.getTrackProperty().get())); textArea.setText(this.staffModel.getTrackProperty().get().toBriefMIDIString("\n")); controller.setTextArea(textArea); Button b = ButtonBuilder.create() .id("noteStringButton") .style("-fx-font: 22 arial; -fx-base: #1055FF;") .text("Evaluate note string") .build(); controller.setNoteStringButton(b); Button pb = ButtonBuilder.create() .id("playButton") .style("-fx-font: 22 arial; -fx-base: #1055FF;") .text("Play") .build(); controller.setPlayButton(pb); final ComboBox<String> clefComboBox = new ComboBox<>(); clefComboBox.getItems().addAll( "Treble", "Bass", "Alto" ); clefComboBox.getSelectionModel().selectFirst(); clefComboBox.getSelectionModel().selectedItemProperty() .addListener(new ChangeListener<String>() { @Override public void changed( ObservableValue<? extends String> observable, String oldValue, String newValue) { } }); ; controller.setClefCombBox(clefComboBox); final ComboBox<Double> fontSizeComboBox = new ComboBox<>(); fontSizeComboBox.getItems().addAll(12d, 24d, 36d, 48d, 72d, 96d); fontSizeComboBox.getSelectionModel().select(3); controller.setFontSizeComboBox(fontSizeComboBox); FXTextAreaReceiver receiver = new FXTextAreaReceiver(); controller.addReceiver(receiver); HBox hbox = HBoxBuilder.create() .padding(new Insets(20)) .children(clefComboBox, fontSizeComboBox) .build(); HBox buttonbox = HBoxBuilder.create() .padding(new Insets(20)) .children(b, pb) .build(); VBox vbox = VBoxBuilder.create() .padding(new Insets(20)) .children(textArea, buttonbox, hbox, receiver) .build(); ScrollPane sp = new ScrollPane(); // sp.setContent(view.getCanvas()); sp.setContent(view); sp.setPrefSize(1300, 300); BorderPane bp = BorderPaneBuilder.create() .id("rootpane") // .padding(new Insets(20)) .style("-fx-padding: 30") .top(sp) .center(vbox) .build(); AnchorPane.setTopAnchor(bp, 10.0); AnchorPane.setBottomAnchor(bp, 10.0); AnchorPane.setLeftAnchor(bp, 10.0); AnchorPane.setRightAnchor(bp, 65.0); root = AnchorPaneBuilder.create() .children(bp) .build(); this.scene = SceneBuilder.create() .root(root) .fill(Color.web("#1030F0")) // .stylesheets("/styles/app2styles.css") .build(); } // private void configureSceneold() { // // // double fontSize = 24d; // // double fontSize = 36d; // double fontSize = 48d; // // double fontSize = 96d; // Font font = Font.loadFont( // FontApp.class.getResource("/fonts/Bravura.otf") // .toExternalForm(), // fontSize); // // FontMetrics fm; // // // Font font = new Font("Bravura", fontSize); // // Canvas canvas = new Canvas(1300, 250); // canvas.setOpacity(100); // // GraphicsContext gc = canvas.getGraphicsContext2D(); // gc.clearRect(0, 0, canvas.getWidth(), canvas.getHeight()); // gc.setFill(Color.WHITE); // gc.fillRect(0, 0, canvas.getWidth(), canvas.getHeight()); // gc.setFill(Color.BLACK); // gc.setTextBaseline(VPos.CENTER); // gc.setFont(font); // // double x = 50d; // double y = 125d; // // double yinc = fontSize / 10d + 1.2; // magic number since we lack // // font // // metrics. works for 48. 
too much // // for 24 or 36 // // this.yspacing = fontSize / 8d; // // gc.fillText(getGlyph("gClef"), x, y - (this.yspacing * 2d)); // // gc.fillText(getGlyph("fClef"), x, y - (this.yspacing * 6d)); // // this.trebleStaffBottom = y; // String staff = getGlyph("staff5Lines"); // for (double xx = x; xx < 1250; xx += fontSize / 2d) { // gc.fillText(staff, xx, y); // } // // gc.fillText(getGlyph("noteQuarterUp"), x += fontSize, y); // gc.fillText(getGlyph("accidentalFlat"), x += fontSize, y); // gc.fillText(getGlyph("noteQuarterDown"), x += fontSize / 3d, y); // gc.fillText(getGlyph("noteHalfUp"), x += fontSize, y); // // double yy = y; // // ascending scale // for (int i = 0; i < 12; i++, yy -= this.yspacing) { // gc.fillText(SymbolFactory.noteQuarterUp(), x += fontSize, yy); // } // // setupStaff(); // // yy = this.trebleFlatYpositions[Pitch.D5]; // gc.fillText(SymbolFactory.noteQuarterDownFlat(), x += fontSize, yy); // // yy = this.trebleFlatYpositions[Pitch.E5]; // gc.fillText(SymbolFactory.noteQuarterDownFlat(), x += fontSize, yy); // // yy = this.trebleFlatYpositions[Pitch.F5]; // gc.fillText(SymbolFactory.noteQuarterDownFlat(), x += fontSize, yy); // // yy = this.trebleFlatYpositions[Pitch.G5]; // gc.fillText(SymbolFactory.noteQuarterDownFlat(), x += fontSize, yy); // // yy = this.trebleFlatYpositions[Pitch.A5]; // gc.fillText(SymbolFactory.noteQuarterDownFlat(), x += fontSize, yy); // // yy = this.trebleFlatYpositions[Pitch.B5]; // gc.fillText(SymbolFactory.noteQuarterDownFlat(), x += fontSize, yy); // // yy = this.trebleFlatYpositions[Pitch.C6]; // gc.fillText(SymbolFactory.noteQuarterDownFlat(), x += fontSize, yy); // // int pitch = Pitch.EF6; // if (needFlats(pitch)) { // String ns = SymbolFactory.noteQuarterDownFlat(); // yy = this.trebleFlatYpositions[pitch]; // gc.fillText(ns, x += fontSize, yy); // } // // ledger? 
"staff1Line" // // /* // * "noteheadBlack": { "stemDownNW": [ 0.0, -0.184 ], "stemUpSE": [ // * 1.328, 0.184 ] }, // */ // // // gc.fillText("abcedfghijklmnopqrstuvwxyz", 50d, 150d); // // /* // * "gClef": { "alternateCodepoint": "U+1D11E", "codepoint": "U+E050" }, // */ // // // Image bgimage = new Image(getClass().getResourceAsStream( // // "/images/background.jpg")); // // ImageView imageView = ImageViewBuilder.create() // // .image(bgimage) // // .build(); // // Button b = ButtonBuilder.create() // .id("someButton") // .text("Button") // .style("-fx-font: 22 arial; -fx-base: #b6e7c9;") // // .onAction(new EventHandler<ActionEvent>() { // // @Override // // public void handle(ActionEvent e) { // // logger.debug("local button pressed {}", e); // // } // // }) // .build(); // // // not a singleton: logger.debug("button builder {}", // // ButtonBuilder.create()); // // // the controller has the action handler // //this.controller.setButton(b); // //BorderPane.setAlignment(b, Pos.CENTER); // // BorderPane borderPane = new BorderPane(); // // borderPane.setTop(toolbar); // // borderPane.setLeft(actionPane); // // borderPane.setRight(colorPane); // // borderPane.setCenter(view.getCanvasPane()); // // // borderPane.setBottom(statusBar); // // Group group = new Group(); // group.getChildren().add(canvas); // // group.getChildren().add(b); // // root = // BorderPaneBuilder // .create() // .id("rootpane") // .padding(new Insets(20)) // // .style("-fx-padding: 30") // .center(group) // .build(); // // this.scene = SceneBuilder.create() // // .root(root) // .root(borderPane) // .fill(Color.web("#103000")) // // .stylesheets("/styles/app2styles.css") // .build(); // // // MIDITrack track = MIDITrackBuilder.create() // // .noteString("C D E") // // .build(); // // track.sequential(); // // } }
rockymusic-fx/src/main/java/com/rockhoppertech/music/fx/cmn/NotationApp.java
package com.rockhoppertech.music.fx.cmn; /* * #%L * rockymusic-fx * %% * Copyright (C) 1996 - 2013 Rockhopper Technologies * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import javafx.application.Application; import javafx.beans.value.ChangeListener; import javafx.beans.value.ObservableValue; import javafx.geometry.Insets; import javafx.scene.Scene; import javafx.scene.SceneBuilder; import javafx.scene.control.Button; import javafx.scene.control.ButtonBuilder; import javafx.scene.control.ComboBox; import javafx.scene.control.ScrollPane; import javafx.scene.control.TextArea; import javafx.scene.control.TextAreaBuilder; import javafx.scene.control.TextField; import javafx.scene.control.TextFieldBuilder; import javafx.scene.layout.AnchorPane; import javafx.scene.layout.AnchorPaneBuilder; import javafx.scene.layout.BorderPane; import javafx.scene.layout.BorderPaneBuilder; import javafx.scene.layout.HBox; import javafx.scene.layout.HBoxBuilder; import javafx.scene.layout.Pane; import javafx.scene.layout.VBox; import javafx.scene.layout.VBoxBuilder; import javafx.scene.paint.Color; import javafx.stage.Screen; import javafx.stage.Stage; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.rockhoppertech.music.fx.cmn.model.StaffModel; import com.rockhoppertech.music.midi.js.MIDITrack; import com.rockhoppertech.music.midi.js.MIDITrackBuilder; /** * @author <a href="http://genedelisa.com/">Gene De Lisa</a> * */ public class NotationApp extends Application { private static final Logger logger = LoggerFactory .getLogger(NotationApp.class); Stage stage; Scene scene; Pane root; private NotationController controller; private StaffModel staffModel; // private NotationView view; // private NotationCanvas view; private StaffRegion view; // private StaffControl view; // private static ObservableList<MIDINote> tableDataList; public static void main(String[] args) throws Exception { launch(args); } @Override public void start(Stage stage) throws Exception { this.stage = stage; this.staffModel = new StaffModel(); MIDITrack track = MIDITrackBuilder .create() .noteString( "E5 F G Ab G# A B C C6 D Eb F# G A B C7 B4 Bf4 A4 Af4") .durations(1, 1.5, .5, .75, .25, .25) .sequential() .build(); System.out.println(track); this.staffModel.setTrack(track); // this.view = new NotationCanvas(this.staffModel); this.view = new StaffRegion(this.staffModel); this.controller = new NotationController(staffModel, view); this.view.drawShapes(); this.configureScene(); this.configureStage(); logger.debug("started"); } private void configureStage() { stage.setTitle("Music Notation"); // fullScreen(); stage.setScene(this.scene); controller.setStage(this.stage); stage.show(); } private void fullScreen() { // make it full screen stage.setX(0); stage.setY(0); stage.setWidth(Screen.getPrimary().getVisualBounds().getWidth()); stage.setHeight(Screen.getPrimary().getVisualBounds().getHeight()); } private void configureScene() { // TextField text = new TextField(); // text.setId("noteStringText"); // text.setEditable(true); // 
text.setPromptText("Enter a note string"); // controller.setTextField(text); TextArea textArea = new TextArea(); textArea.setId("noteStringText"); textArea.setEditable(true); textArea.setPromptText("Enter a note string"); textArea.setWrapText(true); textArea.setText(MIDITrack .getPitchesAsString(this.staffModel.getTrackProperty().get())); controller.setTextArea(textArea); Button b = ButtonBuilder.create() .id("noteStringButton") .style("-fx-font: 22 arial; -fx-base: #1055FF;") .text("Evaluate note string") .build(); controller.setNoteStringButton(b); Button pb = ButtonBuilder.create() .id("playButton") .style("-fx-font: 22 arial; -fx-base: #1055FF;") .text("Play") .build(); controller.setPlayButton(pb); final ComboBox<String> clefComboBox = new ComboBox<>(); clefComboBox.getItems().addAll( "Treble", "Bass", "Alto" ); clefComboBox.getSelectionModel().selectFirst(); clefComboBox.getSelectionModel().selectedItemProperty() .addListener(new ChangeListener<String>() { @Override public void changed( ObservableValue<? extends String> observable, String oldValue, String newValue) { } }); ; controller.setClefCombBox(clefComboBox); final ComboBox<Double> fontSizeComboBox = new ComboBox<>(); fontSizeComboBox.getItems().addAll(12d, 24d, 36d, 48d, 72d, 96d); fontSizeComboBox.getSelectionModel().select(3); controller.setFontSizeComboBox(fontSizeComboBox); FXTextAreaReceiver receiver = new FXTextAreaReceiver(); controller.addReceiver(receiver); HBox hbox = HBoxBuilder.create() .padding(new Insets(20)) .children(clefComboBox, fontSizeComboBox) .build(); HBox buttonbox = HBoxBuilder.create() .padding(new Insets(20)) .children(b, pb) .build(); VBox vbox = VBoxBuilder.create() .padding(new Insets(20)) .children(textArea, buttonbox, hbox, receiver) .build(); ScrollPane sp = new ScrollPane(); // sp.setContent(view.getCanvas()); sp.setContent(view); sp.setPrefSize(1300, 300); BorderPane bp = BorderPaneBuilder.create() .id("rootpane") // .padding(new Insets(20)) .style("-fx-padding: 30") .top(sp) .center(vbox) .build(); AnchorPane.setTopAnchor(bp, 10.0); AnchorPane.setBottomAnchor(bp, 10.0); AnchorPane.setLeftAnchor(bp, 10.0); AnchorPane.setRightAnchor(bp, 65.0); root = AnchorPaneBuilder.create() .children(bp) .build(); this.scene = SceneBuilder.create() .root(root) .fill(Color.web("#1030F0")) // .stylesheets("/styles/app2styles.css") .build(); } // private void configureSceneold() { // // // double fontSize = 24d; // // double fontSize = 36d; // double fontSize = 48d; // // double fontSize = 96d; // Font font = Font.loadFont( // FontApp.class.getResource("/fonts/Bravura.otf") // .toExternalForm(), // fontSize); // // FontMetrics fm; // // // Font font = new Font("Bravura", fontSize); // // Canvas canvas = new Canvas(1300, 250); // canvas.setOpacity(100); // // GraphicsContext gc = canvas.getGraphicsContext2D(); // gc.clearRect(0, 0, canvas.getWidth(), canvas.getHeight()); // gc.setFill(Color.WHITE); // gc.fillRect(0, 0, canvas.getWidth(), canvas.getHeight()); // gc.setFill(Color.BLACK); // gc.setTextBaseline(VPos.CENTER); // gc.setFont(font); // // double x = 50d; // double y = 125d; // // double yinc = fontSize / 10d + 1.2; // magic number since we lack // // font // // metrics. works for 48. 
too much // // for 24 or 36 // // this.yspacing = fontSize / 8d; // // gc.fillText(getGlyph("gClef"), x, y - (this.yspacing * 2d)); // // gc.fillText(getGlyph("fClef"), x, y - (this.yspacing * 6d)); // // this.trebleStaffBottom = y; // String staff = getGlyph("staff5Lines"); // for (double xx = x; xx < 1250; xx += fontSize / 2d) { // gc.fillText(staff, xx, y); // } // // gc.fillText(getGlyph("noteQuarterUp"), x += fontSize, y); // gc.fillText(getGlyph("accidentalFlat"), x += fontSize, y); // gc.fillText(getGlyph("noteQuarterDown"), x += fontSize / 3d, y); // gc.fillText(getGlyph("noteHalfUp"), x += fontSize, y); // // double yy = y; // // ascending scale // for (int i = 0; i < 12; i++, yy -= this.yspacing) { // gc.fillText(SymbolFactory.noteQuarterUp(), x += fontSize, yy); // } // // setupStaff(); // // yy = this.trebleFlatYpositions[Pitch.D5]; // gc.fillText(SymbolFactory.noteQuarterDownFlat(), x += fontSize, yy); // // yy = this.trebleFlatYpositions[Pitch.E5]; // gc.fillText(SymbolFactory.noteQuarterDownFlat(), x += fontSize, yy); // // yy = this.trebleFlatYpositions[Pitch.F5]; // gc.fillText(SymbolFactory.noteQuarterDownFlat(), x += fontSize, yy); // // yy = this.trebleFlatYpositions[Pitch.G5]; // gc.fillText(SymbolFactory.noteQuarterDownFlat(), x += fontSize, yy); // // yy = this.trebleFlatYpositions[Pitch.A5]; // gc.fillText(SymbolFactory.noteQuarterDownFlat(), x += fontSize, yy); // // yy = this.trebleFlatYpositions[Pitch.B5]; // gc.fillText(SymbolFactory.noteQuarterDownFlat(), x += fontSize, yy); // // yy = this.trebleFlatYpositions[Pitch.C6]; // gc.fillText(SymbolFactory.noteQuarterDownFlat(), x += fontSize, yy); // // int pitch = Pitch.EF6; // if (needFlats(pitch)) { // String ns = SymbolFactory.noteQuarterDownFlat(); // yy = this.trebleFlatYpositions[pitch]; // gc.fillText(ns, x += fontSize, yy); // } // // ledger? 
"staff1Line" // // /* // * "noteheadBlack": { "stemDownNW": [ 0.0, -0.184 ], "stemUpSE": [ // * 1.328, 0.184 ] }, // */ // // // gc.fillText("abcedfghijklmnopqrstuvwxyz", 50d, 150d); // // /* // * "gClef": { "alternateCodepoint": "U+1D11E", "codepoint": "U+E050" }, // */ // // // Image bgimage = new Image(getClass().getResourceAsStream( // // "/images/background.jpg")); // // ImageView imageView = ImageViewBuilder.create() // // .image(bgimage) // // .build(); // // Button b = ButtonBuilder.create() // .id("someButton") // .text("Button") // .style("-fx-font: 22 arial; -fx-base: #b6e7c9;") // // .onAction(new EventHandler<ActionEvent>() { // // @Override // // public void handle(ActionEvent e) { // // logger.debug("local button pressed {}", e); // // } // // }) // .build(); // // // not a singleton: logger.debug("button builder {}", // // ButtonBuilder.create()); // // // the controller has the action handler // //this.controller.setButton(b); // //BorderPane.setAlignment(b, Pos.CENTER); // // BorderPane borderPane = new BorderPane(); // // borderPane.setTop(toolbar); // // borderPane.setLeft(actionPane); // // borderPane.setRight(colorPane); // // borderPane.setCenter(view.getCanvasPane()); // // // borderPane.setBottom(statusBar); // // Group group = new Group(); // group.getChildren().add(canvas); // // group.getChildren().add(b); // // root = // BorderPaneBuilder // .create() // .id("rootpane") // .padding(new Insets(20)) // // .style("-fx-padding: 30") // .center(group) // .build(); // // this.scene = SceneBuilder.create() // // .root(root) // .root(borderPane) // .fill(Color.web("#103000")) // // .stylesheets("/styles/app2styles.css") // .build(); // // // MIDITrack track = MIDITrackBuilder.create() // // .noteString("C D E") // // .build(); // // track.sequential(); // // } }
Removing builders
rockymusic-fx/src/main/java/com/rockhoppertech/music/fx/cmn/NotationApp.java
Removing builders
<ide><path>ockymusic-fx/src/main/java/com/rockhoppertech/music/fx/cmn/NotationApp.java <ide> textArea.setEditable(true); <ide> textArea.setPromptText("Enter a note string"); <ide> textArea.setWrapText(true); <del> textArea.setText(MIDITrack <del> .getPitchesAsString(this.staffModel.getTrackProperty().get())); <add>// textArea.setText(MIDITrack <add> //.getPitchesAsString(this.staffModel.getTrackProperty().get())); <add> <add> textArea.setText(this.staffModel.getTrackProperty().get().toBriefMIDIString("\n")); <ide> controller.setTextArea(textArea); <ide> <ide> Button b = ButtonBuilder.create()
Java
mit
dd28a84ccc917300a505f2ae4b3b7074803b83e2
0
lucko/helper
/* * Copyright (c) 2017 Lucko (Luck) <[email protected]> * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package me.lucko.helper.serialize; import com.google.common.base.Preconditions; import com.google.gson.JsonElement; import com.google.gson.JsonObject; import org.bukkit.Bukkit; import org.bukkit.Location; import org.bukkit.block.Block; import org.bukkit.block.BlockFace; /** * An immutable and serializable location object */ public final class BlockPosition { public static BlockPosition deserialize(JsonElement element) { Preconditions.checkArgument(element.isJsonObject()); JsonObject object = element.getAsJsonObject(); Preconditions.checkArgument(object.has("x")); Preconditions.checkArgument(object.has("y")); Preconditions.checkArgument(object.has("z")); Preconditions.checkArgument(object.has("world")); int x = object.get("x").getAsInt(); int y = object.get("y").getAsInt(); int z = object.get("z").getAsInt(); String world = object.get("world").getAsString(); return of(x, y, z, world); } public static BlockPosition of(int x, int y, int z, String world) { return new BlockPosition(x, y, z, world); } public static BlockPosition of(Location location) { return of(location.getBlockX(), location.getBlockY(), location.getBlockZ(), location.getWorld().getName()); } public static BlockPosition of(Block block) { return of(block.getLocation()); } private final int x; private final int y; private final int z; private final String world; private Location bukkitLocation = null; private BlockPosition(int x, int y, int z, String world) { this.x = x; this.y = y; this.z = z; this.world = world; } public int getX() { return this.x; } public int getY() { return this.y; } public int getZ() { return this.z; } public String getWorld() { return this.world; } public synchronized Location toLocation() { if (bukkitLocation == null) { bukkitLocation = new Location(Bukkit.getWorld(world), x, y, z); } return bukkitLocation.clone(); } public Block toBlock() { return toLocation().getBlock(); } public BlockPosition getRelative(BlockFace face) { return BlockPosition.of(x + face.getModX(), y + face.getModY(), z + face.getModZ(), world); } public BlockPosition getRelative(BlockFace face, int distance) { return BlockPosition.of(x + (face.getModX() * distance), y + (face.getModY() * distance), z + (face.getModZ() * distance), world); } public BlockPosition add(int x, int y, int z) { return BlockPosition.of(this.x + x, this.y + y, this.z + z, world); } public BlockPosition subtract(int x, int y, int z) { return add(-x, -y, -z); } 
public JsonObject serialize() { JsonObject object = new JsonObject(); object.addProperty("x", x); object.addProperty("y", y); object.addProperty("z", z); object.addProperty("world", world); return object; } @Override public boolean equals(Object o) { if (o == this) return true; if (!(o instanceof BlockPosition)) return false; final BlockPosition other = (BlockPosition) o; return this.getX() == other.getX() && this.getY() == other.getY() && this.getZ() == other.getZ() && (this.getWorld() == null ? other.getWorld() == null : this.getWorld().equals(other.getWorld())); } @Override public int hashCode() { final int PRIME = 59; int result = 1; result = result * PRIME + this.getX(); result = result * PRIME + this.getY(); result = result * PRIME + this.getZ(); result = result * PRIME + (this.getWorld() == null ? 43 : this.getWorld().hashCode()); return result; } @Override public String toString() { return "BlockPosition(x=" + this.getX() + ", y=" + this.getY() + ", z=" + this.getZ() + ", world=" + this.getWorld() + ")"; } }
src/main/java/me/lucko/helper/serialize/BlockPosition.java
/* * Copyright (c) 2017 Lucko (Luck) <[email protected]> * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package me.lucko.helper.serialize; import com.google.common.base.Preconditions; import com.google.gson.JsonElement; import com.google.gson.JsonObject; import org.bukkit.Bukkit; import org.bukkit.Location; import org.bukkit.block.Block; import org.bukkit.block.BlockFace; /** * An immutable and serializable location object */ public final class BlockPosition { public static BlockPosition deserialize(JsonElement element) { Preconditions.checkArgument(element.isJsonObject()); JsonObject object = element.getAsJsonObject(); Preconditions.checkArgument(object.has("x")); Preconditions.checkArgument(object.has("y")); Preconditions.checkArgument(object.has("z")); Preconditions.checkArgument(object.has("world")); int x = object.get("x").getAsInt(); int y = object.get("y").getAsInt(); int z = object.get("z").getAsInt(); String world = object.get("world").getAsString(); return of(x, y, z, world); } public static BlockPosition of(int x, int y, int z, String world) { return new BlockPosition(x, y, z, world, null); } public static BlockPosition of(Location location) { return of(location.getBlockX(), location.getBlockY(), location.getBlockZ(), location.getWorld().getName()).setBukkitLocation(location); } public static BlockPosition of(Block block) { return of(block.getLocation()); } private final int x; private final int y; private final int z; private final String world; private Location bukkitLocation; private BlockPosition(int x, int y, int z, String world, Location bukkitLocation) { this.x = x; this.y = y; this.z = z; this.world = world; this.bukkitLocation = bukkitLocation; } public int getX() { return this.x; } public int getY() { return this.y; } public int getZ() { return this.z; } public String getWorld() { return this.world; } public synchronized Location toLocation() { if (bukkitLocation == null) { bukkitLocation = new Location(Bukkit.getWorld(world), x, y, z); } return bukkitLocation; } public Block toBlock() { return toLocation().getBlock(); } private BlockPosition setBukkitLocation(Location bukkitLocation) { this.bukkitLocation = bukkitLocation; return this; } public BlockPosition getRelative(BlockFace face) { return BlockPosition.of(x + face.getModX(), y + face.getModY(), z + face.getModZ(), world); } public BlockPosition getRelative(BlockFace face, int distance) { return BlockPosition.of(x + (face.getModX() * distance), y + (face.getModY() * distance), z + (face.getModZ() * distance), world); } 
public BlockPosition add(int x, int y, int z) { return BlockPosition.of(this.x + x, this.y + y, this.z + z, world); } public BlockPosition subtract(int x, int y, int z) { return add(-x, -y, -z); } public JsonObject serialize() { JsonObject object = new JsonObject(); object.addProperty("x", x); object.addProperty("y", y); object.addProperty("z", z); object.addProperty("world", world); return object; } @Override public boolean equals(Object o) { if (o == this) return true; if (!(o instanceof BlockPosition)) return false; final BlockPosition other = (BlockPosition) o; return this.getX() == other.getX() && this.getY() == other.getY() && this.getZ() == other.getZ() && (this.getWorld() == null ? other.getWorld() == null : this.getWorld().equals(other.getWorld())); } @Override public int hashCode() { final int PRIME = 59; int result = 1; result = result * PRIME + this.getX(); result = result * PRIME + this.getY(); result = result * PRIME + this.getZ(); result = result * PRIME + (this.getWorld() == null ? 43 : this.getWorld().hashCode()); return result; } @Override public String toString() { return "BlockPosition(x=" + this.getX() + ", y=" + this.getY() + ", z=" + this.getZ() + ", world=" + this.getWorld() + ")"; } }
fix potential issue with cloned locations
src/main/java/me/lucko/helper/serialize/BlockPosition.java
fix potential issue with cloned locations
<ide><path>rc/main/java/me/lucko/helper/serialize/BlockPosition.java <ide> } <ide> <ide> public static BlockPosition of(int x, int y, int z, String world) { <del> return new BlockPosition(x, y, z, world, null); <add> return new BlockPosition(x, y, z, world); <ide> } <ide> <ide> public static BlockPosition of(Location location) { <del> return of(location.getBlockX(), location.getBlockY(), location.getBlockZ(), location.getWorld().getName()).setBukkitLocation(location); <add> return of(location.getBlockX(), location.getBlockY(), location.getBlockZ(), location.getWorld().getName()); <ide> } <ide> <ide> public static BlockPosition of(Block block) { <ide> private final int z; <ide> private final String world; <ide> <del> private Location bukkitLocation; <add> private Location bukkitLocation = null; <ide> <del> private BlockPosition(int x, int y, int z, String world, Location bukkitLocation) { <add> private BlockPosition(int x, int y, int z, String world) { <ide> this.x = x; <ide> this.y = y; <ide> this.z = z; <ide> this.world = world; <del> this.bukkitLocation = bukkitLocation; <ide> } <ide> <ide> public int getX() { <ide> bukkitLocation = new Location(Bukkit.getWorld(world), x, y, z); <ide> } <ide> <del> return bukkitLocation; <add> return bukkitLocation.clone(); <ide> } <ide> <ide> public Block toBlock() { <ide> return toLocation().getBlock(); <del> } <del> <del> private BlockPosition setBukkitLocation(Location bukkitLocation) { <del> this.bukkitLocation = bukkitLocation; <del> return this; <ide> } <ide> <ide> public BlockPosition getRelative(BlockFace face) {
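The "fix potential issue with cloned locations" change above matters because org.bukkit.Location is mutable: returning the cached instance from toLocation() would let callers mutate BlockPosition's internal state. A short illustration of the hazard the clone() avoids follows; the pos variable is hypothetical.

// Illustrative only -- pos is a hypothetical BlockPosition for some loaded world.
Location loc = pos.toLocation(); // returns bukkitLocation.clone() after the fix
loc.add(0, 10, 0);               // Location.add mutates the instance in place...
// ...but only the caller's copy changes; the Location cached inside pos stays intact.
// Before the fix, the same call would silently shift every later toLocation() result.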
Java
apache-2.0
1306364a2fbd381a338402dc7963451d30e1fc8e
0
dschadow/ApplicationIntrusionDetection,dschadow/ApplicationIntrusionDetection
/* * Copyright (C) 2017 Dominik Schadow, [email protected] * * This file is part of the Application Intrusion Detection project. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package de.dominikschadow.dukeencounters.confirmation; import de.dominikschadow.dukeencounters.encounter.EncounterService; import de.dominikschadow.dukeencounters.user.UserService; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import org.mockito.Mock; import org.mockito.MockitoAnnotations; import static org.assertj.core.api.Assertions.assertThat; import static org.mockito.BDDMockito.given; /** * Tests the [@link ConfirmationService} class. * * @author Dominik Schadow */ public class ConfirmationServiceTest { @Rule public ExpectedException thrown = ExpectedException.none(); @Mock private ConfirmationRepository repository; @Mock private UserService userService; @Mock private EncounterService encounterService; private ConfirmationService service; private Confirmation testConfirmation; @Before public void setup() { MockitoAnnotations.initMocks(this); service = new ConfirmationService(repository, userService, encounterService); testConfirmation = new Confirmation(); testConfirmation.setId(1); } @Test public void getConfirmationsByUsernameWhenUsernameIsNullShouldThrowException() throws Exception { thrown.expect(NullPointerException.class); thrown.expectMessage("username"); service.getConfirmationsByUsername(null); } @Test public void getConfirmationByUsernameAndEncounterIdWhenUsernameAndIdAreValidReturnsConfirmation() throws Exception { given(repository.findByUsernameAndEncounterId("test", 1)).willReturn(testConfirmation); Confirmation confirmation = service.getConfirmationByUsernameAndEncounterId("test", 1); assertThat(confirmation.getId()).isEqualTo(1); } }
duke-encounters/src/test/java/de/dominikschadow/dukeencounters/confirmation/ConfirmationServiceTest.java
/* * Copyright (C) 2017 Dominik Schadow, [email protected] * * This file is part of the Application Intrusion Detection project. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package de.dominikschadow.dukeencounters.confirmation; import de.dominikschadow.dukeencounters.encounter.EncounterService; import de.dominikschadow.dukeencounters.user.UserService; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import org.mockito.Mock; import org.mockito.MockitoAnnotations; import static org.assertj.core.api.Assertions.assertThat; import static org.mockito.BDDMockito.given; /** * Tests the [@link ConfirmationService} class. * * @author Dominik Schadow */ public class ConfirmationServiceTest { @Rule public ExpectedException thrown = ExpectedException.none(); @Mock private ConfirmationRepository repository; @Mock private UserService userService; @Mock private EncounterService encounterService; private ConfirmationService service; private Confirmation testConfirmation; @Before public void setup() { MockitoAnnotations.initMocks(this); service = new ConfirmationService(repository, userService, encounterService); testConfirmation = new Confirmation(); testConfirmation.setId(1); } @Test public void getConfirmationsByUsernameWhenUsernameIsNullShouldThrowException() throws Exception { thrown.expect(NullPointerException.class); thrown.expectMessage("username"); service.getConfirmationsByUsername(null); } @Test public void getConfirmationByUsernameAndEncounterId() throws Exception { given(repository.findByUsernameAndEncounterId("test", 1)).willReturn(testConfirmation); Confirmation confirmation = service.getConfirmationByUsernameAndEncounterId("test", 1); assertThat(confirmation.getId()).isEqualTo(1); } }
Renamed test
duke-encounters/src/test/java/de/dominikschadow/dukeencounters/confirmation/ConfirmationServiceTest.java
Renamed test
<ide><path>uke-encounters/src/test/java/de/dominikschadow/dukeencounters/confirmation/ConfirmationServiceTest.java <ide> } <ide> <ide> @Test <del> public void getConfirmationByUsernameAndEncounterId() throws Exception { <add> public void getConfirmationByUsernameAndEncounterIdWhenUsernameAndIdAreValidReturnsConfirmation() throws Exception { <ide> given(repository.findByUsernameAndEncounterId("test", 1)).willReturn(testConfirmation); <ide> Confirmation confirmation = service.getConfirmationByUsernameAndEncounterId("test", 1); <ide>
Java
apache-2.0
943b6ed12cbe9528936070a8b6d7995f79a410f6
0
andy256/druid,Kleagleguo/druid,taochaoqiang/druid,fjy/druid,Fokko/druid,praveev/druid,Deebs21/druid,calliope7/druid,skyportsystems/druid,rasahner/druid,dclim/druid,Fokko/druid,yaochitc/druid-dev,andy256/druid,noddi/druid,anupkumardixit/druid,monetate/druid,druid-io/druid,pjain1/druid,metamx/druid,wenjixin/druid,cocosli/druid,leventov/druid,optimizely/druid,rasahner/druid,tubemogul/druid,metamx/druid,Fokko/druid,Kleagleguo/druid,implydata/druid,zhiqinghuang/druid,authbox-lib/druid,pombredanne/druid,mangeshpardeshiyahoo/druid,elijah513/druid,dclim/druid,fjy/druid,zhaown/druid,amikey/druid,leventov/druid,qix/druid,anupkumardixit/druid,implydata/druid,friedhardware/druid,noddi/druid,Deebs21/druid,metamx/druid,gianm/druid,mghosh4/druid,mrijke/druid,dkhwangbo/druid,nvoron23/druid,nvoron23/druid,druid-io/druid,guobingkun/druid,authbox-lib/druid,leventov/druid,wenjixin/druid,andy256/druid,potto007/druid-avro,redBorder/druid,haoch/druid,gianm/druid,michaelschiff/druid,premc/druid,jon-wei/druid,se7entyse7en/druid,penuel-leo/druid,knoguchi/druid,haoch/druid,b-slim/druid,pombredanne/druid,friedhardware/druid,authbox-lib/druid,authbox-lib/druid,erikdubbelboer/druid,implydata/druid,zhihuij/druid,himanshug/druid,nishantmonu51/druid,b-slim/druid,minewhat/druid,monetate/druid,KurtYoung/druid,haoch/druid,zxs/druid,qix/druid,767326791/druid,deltaprojects/druid,authbox-lib/druid,zhiqinghuang/druid,Deebs21/druid,zengzhihai110/druid,elijah513/druid,KurtYoung/druid,lcp0578/druid,fjy/druid,liquidm/druid,premc/druid,nvoron23/druid,mghosh4/druid,erikdubbelboer/druid,nvoron23/druid,praveev/druid,yaochitc/druid-dev,premc/druid,zengzhihai110/druid,skyportsystems/druid,rasahner/druid,deltaprojects/druid,himanshug/druid,cocosli/druid,gianm/druid,minewhat/druid,pjain1/druid,zhihuij/druid,se7entyse7en/druid,redBorder/druid,zxs/druid,zxs/druid,nishantmonu51/druid,lcp0578/druid,minewhat/druid,767326791/druid,pombredanne/druid,himanshug/druid,pdeva/druid,guobingkun/druid,eshen1991/druid,guobingkun/druid,monetate/druid,zhaown/druid,eshen1991/druid,milimetric/druid,anupkumardixit/druid,milimetric/druid,skyportsystems/druid,du00cs/druid,lcp0578/druid,767326791/druid,praveev/druid,mrijke/druid,jon-wei/druid,mrijke/druid,andy256/druid,deltaprojects/druid,andy256/druid,winval/druid,druid-io/druid,calliope7/druid,Deebs21/druid,tubemogul/druid,solimant/druid,du00cs/druid,noddi/druid,tubemogul/druid,zengzhihai110/druid,mrijke/druid,michaelschiff/druid,friedhardware/druid,optimizely/druid,minewhat/druid,fjy/druid,himanshug/druid,zhaown/druid,redBorder/druid,deltaprojects/druid,pombredanne/druid,potto007/druid-avro,wenjixin/druid,liquidm/druid,pjain1/druid,deltaprojects/druid,zhiqinghuang/druid,skyportsystems/druid,premc/druid,jon-wei/druid,Deebs21/druid,lizhanhui/data_druid,milimetric/druid,potto007/druid-avro,mghosh4/druid,guobingkun/druid,zhihuij/druid,Kleagleguo/druid,OttoOps/druid,metamx/druid,767326791/druid,dclim/druid,knoguchi/druid,dkhwangbo/druid,praveev/druid,Fokko/druid,smartpcr/druid,eshen1991/druid,du00cs/druid,premc/druid,lcp0578/druid,himanshug/druid,druid-io/druid,amikey/druid,se7entyse7en/druid,Fokko/druid,penuel-leo/druid,qix/druid,tubemogul/druid,anupkumardixit/druid,mrijke/druid,taochaoqiang/druid,penuel-leo/druid,zhihuij/druid,milimetric/druid,minewhat/druid,OttoOps/druid,nishantmonu51/druid,dclim/druid,noddi/druid,KurtYoung/druid,michaelschiff/druid,monetate/druid,redBorder/druid,kevintvh/druid,anupkumardixit/druid,zengzhihai110/druid,winval/druid,dkhwangbo/druid,pjain1/druid,pjain1/druid,calliope7/druid,zhihuij/d
ruid,leventov/druid,dclim/druid,se7entyse7en/druid,pdeva/druid,liquidm/druid,yaochitc/druid-dev,mangeshpardeshiyahoo/druid,redBorder/druid,smartpcr/druid,lizhanhui/data_druid,dkhwangbo/druid,calliope7/druid,michaelschiff/druid,KurtYoung/druid,optimizely/druid,smartpcr/druid,potto007/druid-avro,leventov/druid,zhiqinghuang/druid,skyportsystems/druid,zhaown/druid,eshen1991/druid,zhaown/druid,guobingkun/druid,deltaprojects/druid,cocosli/druid,tubemogul/druid,solimant/druid,du00cs/druid,solimant/druid,yaochitc/druid-dev,friedhardware/druid,liquidm/druid,Kleagleguo/druid,winval/druid,metamx/druid,mangeshpardeshiyahoo/druid,haoch/druid,lizhanhui/data_druid,pjain1/druid,rasahner/druid,jon-wei/druid,mangeshpardeshiyahoo/druid,Fokko/druid,zxs/druid,liquidm/druid,zxs/druid,nishantmonu51/druid,KurtYoung/druid,mghosh4/druid,pdeva/druid,elijah513/druid,amikey/druid,praveev/druid,eshen1991/druid,pdeva/druid,nishantmonu51/druid,zengzhihai110/druid,wenjixin/druid,taochaoqiang/druid,erikdubbelboer/druid,noddi/druid,erikdubbelboer/druid,gianm/druid,amikey/druid,implydata/druid,optimizely/druid,b-slim/druid,michaelschiff/druid,yaochitc/druid-dev,knoguchi/druid,knoguchi/druid,kevintvh/druid,knoguchi/druid,amikey/druid,calliope7/druid,taochaoqiang/druid,kevintvh/druid,monetate/druid,friedhardware/druid,implydata/druid,winval/druid,optimizely/druid,lizhanhui/data_druid,liquidm/druid,mangeshpardeshiyahoo/druid,fjy/druid,winval/druid,lcp0578/druid,nishantmonu51/druid,solimant/druid,nvoron23/druid,nishantmonu51/druid,penuel-leo/druid,gianm/druid,haoch/druid,qix/druid,wenjixin/druid,erikdubbelboer/druid,pombredanne/druid,milimetric/druid,OttoOps/druid,rasahner/druid,pdeva/druid,elijah513/druid,mghosh4/druid,gianm/druid,cocosli/druid,pjain1/druid,jon-wei/druid,smartpcr/druid,b-slim/druid,se7entyse7en/druid,kevintvh/druid,deltaprojects/druid,jon-wei/druid,solimant/druid,taochaoqiang/druid,kevintvh/druid,qix/druid,potto007/druid-avro,monetate/druid,smartpcr/druid,Kleagleguo/druid,penuel-leo/druid,implydata/druid,OttoOps/druid,elijah513/druid,mghosh4/druid,767326791/druid,lizhanhui/data_druid,dkhwangbo/druid,zhiqinghuang/druid,jon-wei/druid,druid-io/druid,cocosli/druid,gianm/druid,michaelschiff/druid,michaelschiff/druid,OttoOps/druid,b-slim/druid,mghosh4/druid,monetate/druid,Fokko/druid,du00cs/druid
/* * Druid - a distributed column store. * Copyright (C) 2012, 2013 Metamarkets Group Inc. * * This program is free software; you can redistribute it and/or * modify it under the terms of the GNU General Public License * as published by the Free Software Foundation; either version 2 * of the License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. */ package io.druid; import com.google.common.collect.Lists; import com.google.common.util.concurrent.ListenableFuture; import com.google.inject.Binder; import com.google.inject.Inject; import com.google.inject.Injector; import com.google.inject.Module; import com.google.inject.servlet.GuiceFilter; import com.metamx.common.lifecycle.Lifecycle; import com.metamx.http.client.HttpClient; import com.metamx.http.client.response.StatusResponseHandler; import com.metamx.http.client.response.StatusResponseHolder; import io.druid.guice.Jerseys; import io.druid.guice.LazySingleton; import io.druid.guice.annotations.Global; import io.druid.initialization.Initialization; import io.druid.server.initialization.JettyServerInitializer; import org.eclipse.jetty.server.Handler; import org.eclipse.jetty.server.Server; import org.eclipse.jetty.server.handler.DefaultHandler; import org.eclipse.jetty.server.handler.HandlerList; import org.eclipse.jetty.servlet.DefaultServlet; import org.eclipse.jetty.servlet.ServletContextHandler; import org.eclipse.jetty.servlet.ServletHolder; import org.eclipse.jetty.servlets.GzipFilter; import org.junit.Ignore; import org.junit.Test; import javax.ws.rs.GET; import javax.ws.rs.Path; import javax.ws.rs.Produces; import javax.ws.rs.core.Response; import java.net.URL; import java.nio.charset.Charset; import java.util.Random; import java.util.concurrent.CountDownLatch; import java.util.concurrent.Executor; import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicLong; public class JettyTest { public static void setProperties() { System.setProperty("druid.host", "localhost:9999"); System.setProperty("druid.port", "9999"); System.setProperty("druid.server.http.numThreads", "20"); System.setProperty("druid.service", "test"); System.setProperty("druid.server.http.maxIdleTime", "PT1S"); System.setProperty("druid.global.http.readTimeout", "PT1S"); } @Test @Ignore // this test will deadlock if it hits an issue, so ignored by default public void testTimeouts() throws Exception { // test for request timeouts properly not locking up all threads setProperties(); Injector injector = Initialization.makeInjectorWithModules( Initialization.makeStartupInjector(), Lists.<Object>newArrayList( new Module() { @Override public void configure(Binder binder) { binder.bind(JettyServerInitializer.class).to(JettyServerInit.class).in(LazySingleton.class); Jerseys.addResource(binder, SlowResource.class); } } ) ); Lifecycle lifecycle = injector.getInstance(Lifecycle.class); // Jetty is Lazy Initialized do a getInstance injector.getInstance(Server.class); lifecycle.start(); ClientHolder holder = injector.getInstance(ClientHolder.class); final HttpClient client = 
holder.getClient(); final Executor executor = Executors.newFixedThreadPool(100); final AtomicLong count = new AtomicLong(0); final CountDownLatch latch = new CountDownLatch(1000); for (int i = 0; i < 10000; i++) { executor.execute( new Runnable() { @Override public void run() { executor.execute( new Runnable() { @Override public void run() { long startTime = System.currentTimeMillis(); long startTime2 = 0; try { ListenableFuture<StatusResponseHolder> go = client.get( new URL( "http://localhost:9999/slow/hello" ) ) .go(new StatusResponseHandler(Charset.defaultCharset())); startTime2 = System.currentTimeMillis(); go.get(); } catch (Exception e) { e.printStackTrace(); } finally { System.out .println( "Response time client" + (System.currentTimeMillis() - startTime) + "time taken for getting future" + (System.currentTimeMillis() - startTime2) + "Counter " + count.incrementAndGet() ); latch.countDown(); } } } ); } } ); } latch.await(); lifecycle.stop(); } public static class ClientHolder { HttpClient client; @Inject ClientHolder(@Global HttpClient client) { this.client = client; } public HttpClient getClient() { return client; } } public static class JettyServerInit implements JettyServerInitializer { @Override public void initialize(Server server, Injector injector) { final ServletContextHandler root = new ServletContextHandler(ServletContextHandler.SESSIONS); root.addServlet(new ServletHolder(new DefaultServlet()), "/*"); root.addFilter(GzipFilter.class, "/*", null); root.addFilter(GuiceFilter.class, "/*", null); final HandlerList handlerList = new HandlerList(); handlerList.setHandlers(new Handler[]{root, new DefaultHandler()}); server.setHandler(handlerList); } } @Path("/slow") public static class SlowResource { public static Random random = new Random(); @GET @Path("/hello") @Produces("application/json") public Response hello() { try { TimeUnit.MILLISECONDS.sleep(100 + random.nextInt(2000)); } catch (InterruptedException e) { // } return Response.ok("hello").build(); } } }
server/src/test/java/io/druid/server/initialization/JettyTest.java
/* * Druid - a distributed column store. * Copyright (C) 2012, 2013 Metamarkets Group Inc. * * This program is free software; you can redistribute it and/or * modify it under the terms of the GNU General Public License * as published by the Free Software Foundation; either version 2 * of the License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. */ package io.druid; import com.google.common.collect.Lists; import com.google.common.util.concurrent.ListenableFuture; import com.google.inject.Binder; import com.google.inject.Inject; import com.google.inject.Injector; import com.google.inject.Module; import com.google.inject.servlet.GuiceFilter; import com.metamx.common.lifecycle.Lifecycle; import com.metamx.http.client.HttpClient; import com.metamx.http.client.response.StatusResponseHandler; import com.metamx.http.client.response.StatusResponseHolder; import io.druid.guice.Jerseys; import io.druid.guice.LazySingleton; import io.druid.guice.annotations.Global; import io.druid.initialization.Initialization; import io.druid.server.initialization.JettyServerInitializer; import org.eclipse.jetty.server.Handler; import org.eclipse.jetty.server.Server; import org.eclipse.jetty.server.handler.DefaultHandler; import org.eclipse.jetty.server.handler.HandlerList; import org.eclipse.jetty.servlet.DefaultServlet; import org.eclipse.jetty.servlet.ServletContextHandler; import org.eclipse.jetty.servlet.ServletHolder; import org.eclipse.jetty.servlets.GzipFilter; import org.junit.Ignore; import org.junit.Test; import javax.ws.rs.GET; import javax.ws.rs.Path; import javax.ws.rs.Produces; import javax.ws.rs.core.Response; import java.net.URL; import java.nio.charset.Charset; import java.util.Random; import java.util.concurrent.CountDownLatch; import java.util.concurrent.Executor; import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicLong; public class JettyTest { public static void setProperties() { System.setProperty("druid.host", "localhost:9999"); System.setProperty("druid.port", "9999"); System.setProperty("druid.server.http.numThreads", "20"); System.setProperty("druid.service", "test"); System.setProperty("druid.server.http.maxIdleTime", "PT1S"); System.setProperty("druid.global.http.readTimeout", "PT1S"); } @Test @Ignore public void testTimeouts() throws Exception { setProperties(); Injector injector = Initialization.makeInjectorWithModules( Initialization.makeStartupInjector(), Lists.<Object>newArrayList( new Module() { @Override public void configure(Binder binder) { binder.bind(JettyServerInitializer.class).to(JettyServerInit.class).in(LazySingleton.class); Jerseys.addResource(binder, SlowResource.class); } } ) ); Lifecycle lifecycle = injector.getInstance(Lifecycle.class); // Jetty is Lazy Initialized do a getInstance injector.getInstance(Server.class); lifecycle.start(); ClientHolder holder = injector.getInstance(ClientHolder.class); final HttpClient client = holder.getClient(); final Executor executor = Executors.newFixedThreadPool(100); final AtomicLong count = new AtomicLong(0); final 
CountDownLatch latch = new CountDownLatch(1000); for (int i = 0; i < 10000; i++) { executor.execute( new Runnable() { @Override public void run() { executor.execute( new Runnable() { @Override public void run() { long startTime = System.currentTimeMillis(); long startTime2 = 0; try { ListenableFuture<StatusResponseHolder> go = client.get( new URL( "http://localhost:9999/slow/hello" ) ) .go(new StatusResponseHandler(Charset.defaultCharset())); startTime2 = System.currentTimeMillis(); go.get(); } catch (Exception e) { e.printStackTrace(); } finally { System.out .println( "Response time client" + (System.currentTimeMillis() - startTime) + "time taken for getting future" + (System.currentTimeMillis() - startTime2) + "Counter " + count.incrementAndGet() ); latch.countDown(); } } } ); } } ); } latch.await(); lifecycle.stop(); } public static class ClientHolder { HttpClient client; @Inject ClientHolder(@Global HttpClient client) { this.client = client; } public HttpClient getClient() { return client; } } public static class JettyServerInit implements JettyServerInitializer { @Override public void initialize(Server server, Injector injector) { final ServletContextHandler root = new ServletContextHandler(ServletContextHandler.SESSIONS); root.addServlet(new ServletHolder(new DefaultServlet()), "/*"); root.addFilter(GzipFilter.class, "/*", null); root.addFilter(GuiceFilter.class, "/*", null); final HandlerList handlerList = new HandlerList(); handlerList.setHandlers(new Handler[]{root, new DefaultHandler()}); server.setHandler(handlerList); } } @Path("/slow") public static class SlowResource { public static Random random = new Random(); @GET @Path("/hello") @Produces("application/json") public Response hello() { try { TimeUnit.MILLISECONDS.sleep(100 + random.nextInt(2000)); } catch (InterruptedException e) { // } return Response.ok("hello").build(); } } }
add comments
server/src/test/java/io/druid/server/initialization/JettyTest.java
add comments
<ide><path>erver/src/test/java/io/druid/server/initialization/JettyTest.java <ide> System.setProperty("druid.global.http.readTimeout", "PT1S"); <ide> } <ide> <del> @Test <del> @Ignore <add> @Test @Ignore // this test will deadlock if it hits an issue, so ignored by default <ide> public void testTimeouts() throws Exception <ide> { <add> // test for request timeouts properly not locking up all threads <add> <ide> setProperties(); <ide> Injector injector = Initialization.makeInjectorWithModules( <ide> Initialization.makeStartupInjector(), Lists.<Object>newArrayList(
Java
apache-2.0
11ff4a94f5768ffdf526d89bfb030a053fa64304
0
j-fuentes/cas,zhoffice/cas,kalatestimine/cas,fogbeam/fogbeam_cas,fengbaicanhe/cas,jacklotusho/cas,mduszyk/cas,zhangwei5095/jasig-cas-server,rallportctr/cas,DICE-UNC/cas,creamer/cas,rallportctr/cas,zawn/cas,zhangjianTFTC/cas,fengbaicanhe/cas,austgl/cas,joansmith/cas,icereval/cas,zhangwei5095/jasig-cas-server,Kevin2030/cas,zhangjianTFTC/cas,thomasdarimont/cas,mduszyk/cas,thomasdarimont/cas,PetrGasparik/cas,PetrGasparik/cas,ssmyka/cas,yisiqi/cas,mduszyk/cas,thomasdarimont/cas,eBaoTech/cas,luneo7/cas,j-fuentes/cas,zhaorui1/cas,ssmyka/cas,creamer/cas,nader93k/cas,j-fuentes/cas,j-fuentes/cas,creamer/cas,PetrGasparik/cas,fogbeam/fogbeam_cas,fannyfinal/cas,HuangWeiWei1919/cas,battags/cas,rallportctr/cas,luneo7/cas,austgl/cas,Kuohong/cas,Kuohong/cas,zhangwei5095/jasig-cas-server,fannyfinal/cas,Kuohong/cas,austgl/cas,austgl/cas,moghaddam/cas,yisiqi/cas,vbonamy/cas,DICE-UNC/cas,vbonamy/cas,jasonchw/cas,kalatestimine/cas,zhangjianTFTC/cas,icereval/cas,Kuohong/cas,yisiqi/cas,Kevin2030/cas,nestle1998/cas,thomasdarimont/cas,lijihuai/cas,jasonchw/cas,joansmith/cas,rallportctr/cas,joansmith/cas,zhangjianTFTC/cas,icereval/cas,eBaoTech/cas,moghaddam/cas,youjava/cas,nestle1998/cas,joansmith/cas,Kevin2030/cas,jacklotusho/cas,jacklotusho/cas,fogbeam/fogbeam_cas,moghaddam/cas,yisiqi/cas,vbonamy/cas,battags/cas,icereval/cas,nestle1998/cas,ssmyka/cas,zawn/cas,jasonchw/cas,zhaorui1/cas,Kevin2030/cas,ssmyka/cas,battags/cas,nader93k/cas,eBaoTech/cas,zhoffice/cas,fengbaicanhe/cas,jasonchw/cas,zhangwei5095/jasig-cas-server,youjava/cas,fengbaicanhe/cas,DICE-UNC/cas,kalatestimine/cas,mduszyk/cas,zhoffice/cas,youjava/cas,nader93k/cas,HuangWeiWei1919/cas,jacklotusho/cas,fannyfinal/cas,luneo7/cas,kalatestimine/cas,zhaorui1/cas,nader93k/cas,lijihuai/cas,zhoffice/cas,lijihuai/cas,fannyfinal/cas,fogbeam/fogbeam_cas,HuangWeiWei1919/cas,DICE-UNC/cas,eBaoTech/cas,lijihuai/cas,zhaorui1/cas,youjava/cas,battags/cas,HuangWeiWei1919/cas,zawn/cas,PetrGasparik/cas,nestle1998/cas,luneo7/cas,zawn/cas,vbonamy/cas,moghaddam/cas,creamer/cas
/* * Licensed to Apereo under one or more contributor license * agreements. See the NOTICE file distributed with this work * for additional information regarding copyright ownership. * Apereo licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file * except in compliance with the License. You may obtain a * copy of the License at the following location: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.jasig.cas.adaptors.x509.authentication.handler.support; import java.security.GeneralSecurityException; import java.security.cert.X509CRL; import java.security.cert.X509CRLEntry; import java.security.cert.X509Certificate; import java.util.ArrayList; import java.util.Collection; import java.util.Iterator; import java.util.List; import javax.validation.constraints.NotNull; import org.jasig.cas.adaptors.x509.util.CertUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Base class for all CRL-based revocation checkers. * * @author Marvin S. Addison * @since 3.4.6 * */ public abstract class AbstractCRLRevocationChecker implements RevocationChecker { /** Logger instance. **/ protected final Logger logger = LoggerFactory.getLogger(getClass()); /** * Flag to indicate whether all * crls should be checked for the cert resource. * Defaults to <code>false</code>. **/ protected boolean checkAll; /** Policy to apply when CRL data is unavailable. */ @NotNull private RevocationPolicy<Void> unavailableCRLPolicy = new DenyRevocationPolicy(); /** Policy to apply when CRL data has expired. */ @NotNull private RevocationPolicy<X509CRL> expiredCRLPolicy = new ThresholdExpiredCRLRevocationPolicy(); /** * {@inheritDoc} **/ @Override public void check(final X509Certificate cert) throws GeneralSecurityException { if (cert == null) { throw new IllegalArgumentException("Certificate cannot be null."); } logger.debug("Evaluating certificate revocation status for {}", CertUtils.toString(cert)); final Collection<X509CRL> crls = getCRLs(cert); if (crls == null || crls.isEmpty()) { logger.warn("CRL data is not available for {}", CertUtils.toString(cert)); this.unavailableCRLPolicy.apply(null); return; } final List<X509CRL> expiredCrls = new ArrayList<>(); final List<X509CRLEntry> revokedCrls = new ArrayList<>(); final Iterator<X509CRL> it = crls.iterator(); while (it.hasNext()) { final X509CRL crl = it.next(); if (CertUtils.isExpired(crl)) { logger.warn("CRL data expired on {}", crl.getNextUpdate()); expiredCrls.add(crl); } } if (crls.size() == expiredCrls.size()) { logger.warn("All CRLs retrieved have expired. Applying CRL expiration policy..."); for (final X509CRL crl : expiredCrls) { this.expiredCRLPolicy.apply(crl); } } else { crls.removeAll(expiredCrls); logger.debug("Valid CRLs [{}] found that are not expired yet", crls); for (final X509CRL crl : crls) { final X509CRLEntry entry = crl.getRevokedCertificate(cert); if (entry != null) { revokedCrls.add(entry); } } if (revokedCrls.size() == crls.size()) { final X509CRLEntry entry = revokedCrls.get(0); logger.warn("All CRL entries have been revoked. 
Rejecting the first entry [{}]", entry); throw new RevokedCertificateException(entry); } } } /** * Sets the policy to apply when CRL data is unavailable. * * @param policy Revocation policy. */ public void setUnavailableCRLPolicy(final RevocationPolicy<Void> policy) { this.unavailableCRLPolicy = policy; } /** * Sets the policy to apply when CRL data is expired. * * @param policy Revocation policy. */ public void setExpiredCRLPolicy(final RevocationPolicy<X509CRL> policy) { this.expiredCRLPolicy = policy; } /** * Indicates whether all resources should be checked, * or revocation should stop at the first resource * that produces the cert. * * @param checkAll the check all */ public final void setCheckAll(final boolean checkAll) { this.checkAll = checkAll; } /** * Gets the first fetched CRL for the given certificate. * * @param cert Certificate for which the CRL of the issuing CA should be retrieved. * * @return CRL for given cert, or null */ public final X509CRL getCRL(final X509Certificate cert) { final Collection<X509CRL> list = getCRLs(cert); if (list != null && !list.isEmpty()) { return list.iterator().next(); } logger.debug("No CRL could be found for {}", CertUtils.toString(cert)); return null; } /** * Records the addition of a new CRL entry. * @param id the id of the entry to keep track of * @param crl new CRL entry * @return true if the entry was added successfully. * @since 4.1 */ protected abstract boolean addCRL(final Object id, final X509CRL crl); /** * Gets the collection of CRLs for the given certificate. * * @param cert Certificate for which the CRL of the issuing CA should be retrieved. * @return CRLs for given cert. */ protected abstract Collection<X509CRL> getCRLs(final X509Certificate cert); }
cas-server-support-x509/src/main/java/org/jasig/cas/adaptors/x509/authentication/handler/support/AbstractCRLRevocationChecker.java
/* * Licensed to Apereo under one or more contributor license * agreements. See the NOTICE file distributed with this work * for additional information regarding copyright ownership. * Apereo licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file * except in compliance with the License. You may obtain a * copy of the License at the following location: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.jasig.cas.adaptors.x509.authentication.handler.support; import java.security.GeneralSecurityException; import java.security.cert.X509CRL; import java.security.cert.X509CRLEntry; import java.security.cert.X509Certificate; import java.util.ArrayList; import java.util.Collection; import java.util.Iterator; import java.util.List; import javax.validation.constraints.NotNull; import org.jasig.cas.adaptors.x509.util.CertUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Base class for all CRL-based revocation checkers. * * @author Marvin S. Addison * @since 3.4.6 * */ public abstract class AbstractCRLRevocationChecker implements RevocationChecker { /** Logger instance. **/ protected final Logger logger = LoggerFactory.getLogger(getClass()); /** * Flag to indicate whether all * crls should be checked for the cert resource. * Defaults to <code>false</code>. **/ protected boolean checkAll; /** Policy to apply when CRL data is unavailable. */ @NotNull private RevocationPolicy<Void> unavailableCRLPolicy = new DenyRevocationPolicy(); /** Policy to apply when CRL data has expired. */ @NotNull private RevocationPolicy<X509CRL> expiredCRLPolicy = new ThresholdExpiredCRLRevocationPolicy(); /** * {@inheritDoc} **/ @Override public void check(final X509Certificate cert) throws GeneralSecurityException { if (cert == null) { throw new IllegalArgumentException("Certificate cannot be null."); } logger.debug("Evaluating certificate revocation status for {}", CertUtils.toString(cert)); final Collection<X509CRL> crls = getCRLs(cert); if (crls == null || crls.isEmpty()) { logger.warn("CRL data is not available for {}", CertUtils.toString(cert)); this.unavailableCRLPolicy.apply(null); return; } final List<X509CRL> expiredCrls = new ArrayList<>(); final List<X509CRLEntry> revokedCrls = new ArrayList<>(); final Iterator<X509CRL> it = crls.iterator(); while (it.hasNext()) { final X509CRL crl = it.next(); if (CertUtils.isExpired(crl)) { logger.warn("CRL data expired on {}", crl.getNextUpdate()); expiredCrls.add(crl); } } if (crls.size() == expiredCrls.size()) { logger.warn("All CRLs retrieved have expired. Applying CRL expiration policy..."); for (final X509CRL crl : expiredCrls) { this.expiredCRLPolicy.apply(crl); } } else { crls.removeAll(expiredCrls); logger.debug("Valid CRLs [{}] found that are not expired yet", crls); for (final X509CRL crl : crls) { final X509CRLEntry entry = crl.getRevokedCertificate(cert); if (entry != null) { revokedCrls.add(entry); } } if (revokedCrls.size() == crls.size() && !revokedCrls.isEmpty()) { final X509CRLEntry entry = revokedCrls.get(0); logger.warn("All CRL entries have been revoked. 
Rejecting the first entry [{}]", entry); throw new RevokedCertificateException(entry); } } } /** * Sets the policy to apply when CRL data is unavailable. * * @param policy Revocation policy. */ public void setUnavailableCRLPolicy(final RevocationPolicy<Void> policy) { this.unavailableCRLPolicy = policy; } /** * Sets the policy to apply when CRL data is expired. * * @param policy Revocation policy. */ public void setExpiredCRLPolicy(final RevocationPolicy<X509CRL> policy) { this.expiredCRLPolicy = policy; } /** * Indicates whether all resources should be checked, * or revocation should stop at the first resource * that produces the cert. * * @param checkAll the check all */ public final void setCheckAll(final boolean checkAll) { this.checkAll = checkAll; } /** * Gets the first fetched CRL for the given certificate. * * @param cert Certificate for which the CRL of the issuing CA should be retrieved. * * @return CRL for given cert, or null */ public final X509CRL getCRL(final X509Certificate cert) { final Collection<X509CRL> list = getCRLs(cert); if (list != null && !list.isEmpty()) { return list.iterator().next(); } logger.debug("No CRL could be found for {}", CertUtils.toString(cert)); return null; } /** * Records the addition of a new CRL entry. * @param id the id of the entry to keep track of * @param crl new CRL entry * @return true if the entry was added successfully. * @since 4.1 */ protected abstract boolean addCRL(final Object id, final X509CRL crl); /** * Gets the collection of CRLs for the given certificate. * * @param cert Certificate for which the CRL of the issuing CA should be retrieved. * @return CRLs for given cert. */ protected abstract Collection<X509CRL> getCRLs(final X509Certificate cert); }
removed useless check
cas-server-support-x509/src/main/java/org/jasig/cas/adaptors/x509/authentication/handler/support/AbstractCRLRevocationChecker.java
removed useless check
<ide><path>as-server-support-x509/src/main/java/org/jasig/cas/adaptors/x509/authentication/handler/support/AbstractCRLRevocationChecker.java <ide> } <ide> } <ide> <del> if (revokedCrls.size() == crls.size() && !revokedCrls.isEmpty()) { <add> if (revokedCrls.size() == crls.size()) { <ide> final X509CRLEntry entry = revokedCrls.get(0); <ide> logger.warn("All CRL entries have been revoked. Rejecting the first entry [{}]", entry); <ide> throw new RevokedCertificateException(entry);
Java
apache-2.0
30a11ddb653d2e63714a1b1544b83cb3314ecb98
0
skmedix/JFoenix,jfoenixadmin/JFoenix
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package com.jfoenix.controls; import java.util.ArrayList; import java.util.Collections; import java.util.List; import javafx.animation.Interpolator; import javafx.animation.KeyFrame; import javafx.animation.KeyValue; import javafx.animation.Timeline; import javafx.animation.Transition; import javafx.beans.DefaultProperty; import javafx.beans.property.BooleanProperty; import javafx.beans.property.ObjectProperty; import javafx.beans.property.SimpleBooleanProperty; import javafx.beans.property.SimpleObjectProperty; import javafx.css.CssMetaData; import javafx.css.SimpleStyleableObjectProperty; import javafx.css.Styleable; import javafx.css.StyleableObjectProperty; import javafx.css.StyleableProperty; import javafx.event.EventHandler; import javafx.geometry.Pos; import javafx.scene.Parent; import javafx.scene.input.MouseEvent; import javafx.scene.layout.Background; import javafx.scene.layout.BackgroundFill; import javafx.scene.layout.CornerRadii; import javafx.scene.layout.Pane; import javafx.scene.layout.Region; import javafx.scene.layout.StackPane; import javafx.scene.paint.Color; import javafx.util.Duration; import com.jfoenix.controls.events.JFXDialogEvent; import com.jfoenix.converters.DialogTransitionConverter; import com.jfoenix.effects.JFXDepthManager; import com.jfoenix.transitions.CachedTransition; /** * @author Shadi Shaheen * note that for JFXDialog to work properly the root node should * be of type {@link StackPane} */ @DefaultProperty(value="content") public class JFXDialog extends StackPane { // public static enum JFXDialogLayout{PLAIN, HEADING, ACTIONS, BACKDROP}; public static enum DialogTransition{CENTER, TOP, RIGHT, BOTTOM, LEFT}; private StackPane contentHolder; private StackPane overlayPane; private double offsetX = 0; private double offsetY = 0; private Pane dialogContainer; private Region content; private Transition animation; private BooleanProperty overlayClose = new SimpleBooleanProperty(true); EventHandler<? 
super MouseEvent> closeHandler = (e)->close(); public JFXDialog(){ this(null,null,DialogTransition.CENTER); } /** * creates JFX Dialog control * @param dialogContainer * @param content * @param transitionType */ public JFXDialog(Pane dialogContainer, Region content, DialogTransition transitionType) { initialize(); setContent(content); setDialogContainer(dialogContainer); this.transitionType.set(transitionType); // init change listeners initChangeListeners(); } /** * creates JFX Dialog control * @param dialogContainer * @param content * @param transitionType * @param overlayClose */ public JFXDialog(Pane dialogContainer, Region content, DialogTransition transitionType, boolean overlayClose) { initialize(); setOverlayClose(overlayClose); setContent(content); setDialogContainer(dialogContainer); this.transitionType.set(transitionType); // init change listeners initChangeListeners(); } private void initChangeListeners(){ overlayCloseProperty().addListener((o,oldVal,newVal)->{ if(overlayPane!=null){ if(newVal) overlayPane.addEventHandler(MouseEvent.MOUSE_PRESSED, closeHandler); else overlayPane.removeEventHandler(MouseEvent.MOUSE_PRESSED, closeHandler); } }); } private void initialize() { this.setVisible(false); this.getStyleClass().add(DEFAULT_STYLE_CLASS); contentHolder = new StackPane(); contentHolder.setBackground(new Background(new BackgroundFill(Color.WHITE, new CornerRadii(2), null))); JFXDepthManager.setDepth(contentHolder, 4); contentHolder.setPickOnBounds(false); // ensure stackpane is never resized beyond it's preferred size contentHolder.setMaxSize(Region.USE_PREF_SIZE, Region.USE_PREF_SIZE); overlayPane = new StackPane(); overlayPane.getChildren().add(contentHolder); overlayPane.getStyleClass().add("jfx-dialog-overlay-pane"); StackPane.setAlignment(contentHolder, Pos.CENTER); overlayPane.setVisible(false); overlayPane.setBackground(new Background(new BackgroundFill(Color.rgb(0, 0, 0, 0.1), null, null))); // close the dialog if clicked on the overlay pane if(overlayClose.get()) overlayPane.addEventHandler(MouseEvent.MOUSE_PRESSED, closeHandler); // prevent propagating the events to overlay pane contentHolder.addEventHandler(MouseEvent.ANY, (e)->e.consume()); } /*************************************************************************** * * * Setters / Getters * * * **************************************************************************/ public Pane getDialogContainer() { return dialogContainer; } public void setDialogContainer(Pane dialogContainer) { if(dialogContainer!=null){ this.dialogContainer = dialogContainer; if(this.getChildren().indexOf(overlayPane)==-1)this.getChildren().setAll(overlayPane); this.visibleProperty().unbind(); this.visibleProperty().bind(overlayPane.visibleProperty()); if(this.dialogContainer.getChildren().indexOf(this)==-1 || this.dialogContainer.getChildren().indexOf(this)!=this.dialogContainer.getChildren().size()-1){ this.dialogContainer.getChildren().remove(this); this.dialogContainer.getChildren().add(this); } // FIXME: need to be improved to consider only the parent boundary offsetX = (this.getParent().getBoundsInLocal().getWidth()); offsetY = (this.getParent().getBoundsInLocal().getHeight()); animation = getShowAnimation(transitionType.get()); } } public Region getContent() { return content; } public void setContent(Region content) { if(content!=null){ this.content = content; contentHolder.getChildren().add(content); } } public final BooleanProperty overlayCloseProperty() { return this.overlayClose; } public final boolean isOverlayClose() { return 
this.overlayCloseProperty().get(); } public final void setOverlayClose(final boolean overlayClose) { this.overlayCloseProperty().set(overlayClose); } /*************************************************************************** * * * Public API * * * **************************************************************************/ public void show(Pane dialogContainer){ this.setDialogContainer(dialogContainer); animation.play(); } public void show(){ this.setDialogContainer(dialogContainer); // animation = getShowAnimation(transitionType.get()); animation.play(); } public void close(){ animation.setRate(-1); animation.play(); animation.setOnFinished((e)->{ resetProperties(); }); onDialogClosedProperty.get().handle(new JFXDialogEvent(JFXDialogEvent.CLOSED)); } /*************************************************************************** * * * Transitions * * * **************************************************************************/ private Transition getShowAnimation(DialogTransition transitionType){ Transition animation = null; if(contentHolder!=null){ switch (transitionType) { case LEFT: contentHolder.setTranslateX(-offsetX); animation = new LeftTransition(); break; case RIGHT: contentHolder.setTranslateX(offsetX); animation = new RightTransition(); break; case TOP: contentHolder.setTranslateY(-offsetY); animation = new TopTransition(); break; case BOTTOM: contentHolder.setTranslateY(offsetY); animation = new BottomTransition(); break; default: contentHolder.setScaleX(0); contentHolder.setScaleY(0); animation = new CenterTransition(); break; } } if(animation!=null)animation.setOnFinished((finish)->onDialogOpenedProperty.get().handle(new JFXDialogEvent(JFXDialogEvent.OPENED))); return animation; } private void resetProperties(){ overlayPane.setVisible(false); contentHolder.setTranslateX(0); contentHolder.setTranslateY(0); contentHolder.setScaleX(1); contentHolder.setScaleY(1); } private class LeftTransition extends CachedTransition { public LeftTransition() { super(contentHolder, new Timeline( new KeyFrame(Duration.ZERO, new KeyValue(contentHolder.translateXProperty(), -offsetX ,Interpolator.EASE_BOTH), new KeyValue(overlayPane.visibleProperty(), false ,Interpolator.EASE_BOTH) ), new KeyFrame(Duration.millis(10), new KeyValue(overlayPane.visibleProperty(), true ,Interpolator.EASE_BOTH), new KeyValue(overlayPane.opacityProperty(), 0,Interpolator.EASE_BOTH) ), new KeyFrame(Duration.millis(1000), new KeyValue(contentHolder.translateXProperty(), 0,Interpolator.EASE_BOTH), new KeyValue(overlayPane.opacityProperty(), 1,Interpolator.EASE_BOTH) )) ); // reduce the number to increase the shifting , increase number to reduce shifting setCycleDuration(Duration.seconds(0.4)); setDelay(Duration.seconds(0)); } } private class RightTransition extends CachedTransition { public RightTransition() { super(contentHolder, new Timeline( new KeyFrame(Duration.ZERO, new KeyValue(contentHolder.translateXProperty(), offsetX ,Interpolator.EASE_BOTH), new KeyValue(overlayPane.visibleProperty(), false ,Interpolator.EASE_BOTH) ), new KeyFrame(Duration.millis(10), new KeyValue(overlayPane.visibleProperty(), true ,Interpolator.EASE_BOTH), new KeyValue(overlayPane.opacityProperty(), 0, Interpolator.EASE_BOTH) ), new KeyFrame(Duration.millis(1000), new KeyValue(contentHolder.translateXProperty(), 0,Interpolator.EASE_BOTH), new KeyValue(overlayPane.opacityProperty(), 1, Interpolator.EASE_BOTH))) ); // reduce the number to increase the shifting , increase number to reduce shifting setCycleDuration(Duration.seconds(0.4)); 
setDelay(Duration.seconds(0)); } } private class TopTransition extends CachedTransition { public TopTransition() { super(contentHolder, new Timeline( new KeyFrame(Duration.ZERO, new KeyValue(contentHolder.translateYProperty(), -offsetY ,Interpolator.EASE_BOTH), new KeyValue(overlayPane.visibleProperty(), false ,Interpolator.EASE_BOTH) ), new KeyFrame(Duration.millis(10), new KeyValue(overlayPane.visibleProperty(), true ,Interpolator.EASE_BOTH), new KeyValue(overlayPane.opacityProperty(), 0, Interpolator.EASE_BOTH) ), new KeyFrame(Duration.millis(1000), new KeyValue(contentHolder.translateYProperty(), 0,Interpolator.EASE_BOTH), new KeyValue(overlayPane.opacityProperty(), 1, Interpolator.EASE_BOTH))) ); // reduce the number to increase the shifting , increase number to reduce shifting setCycleDuration(Duration.seconds(0.4)); setDelay(Duration.seconds(0)); } } private class BottomTransition extends CachedTransition { public BottomTransition() { super(contentHolder, new Timeline( new KeyFrame(Duration.ZERO, new KeyValue(contentHolder.translateYProperty(), offsetY ,Interpolator.EASE_BOTH), new KeyValue(overlayPane.visibleProperty(), false ,Interpolator.EASE_BOTH) ), new KeyFrame(Duration.millis(10), new KeyValue(overlayPane.visibleProperty(), true ,Interpolator.EASE_BOTH), new KeyValue(overlayPane.opacityProperty(), 0, Interpolator.EASE_BOTH) ), new KeyFrame(Duration.millis(1000), new KeyValue(contentHolder.translateYProperty(), 0,Interpolator.EASE_BOTH), new KeyValue(overlayPane.opacityProperty(), 1, Interpolator.EASE_BOTH))) ); // reduce the number to increase the shifting , increase number to reduce shifting setCycleDuration(Duration.seconds(0.4)); setDelay(Duration.seconds(0)); } } private class CenterTransition extends CachedTransition { public CenterTransition() { super(contentHolder, new Timeline( new KeyFrame(Duration.ZERO, new KeyValue(contentHolder.scaleXProperty(), 0 ,Interpolator.EASE_BOTH), new KeyValue(contentHolder.scaleYProperty(), 0 ,Interpolator.EASE_BOTH), new KeyValue(overlayPane.visibleProperty(), false ,Interpolator.EASE_BOTH) ), new KeyFrame(Duration.millis(10), new KeyValue(overlayPane.visibleProperty(), true ,Interpolator.EASE_BOTH), new KeyValue(overlayPane.opacityProperty(), 0,Interpolator.EASE_BOTH) ), new KeyFrame(Duration.millis(1000), new KeyValue(contentHolder.scaleXProperty(), 1 ,Interpolator.EASE_BOTH), new KeyValue(contentHolder.scaleYProperty(), 1 ,Interpolator.EASE_BOTH), new KeyValue(overlayPane.opacityProperty(), 1, Interpolator.EASE_BOTH) )) ); // reduce the number to increase the shifting , increase number to reduce shifting setCycleDuration(Duration.seconds(0.4)); setDelay(Duration.seconds(0)); } } /*************************************************************************** * * * Stylesheet Handling * * * **************************************************************************/ private static final String DEFAULT_STYLE_CLASS = "jfx-dialog"; private StyleableObjectProperty<DialogTransition> transitionType = new SimpleStyleableObjectProperty<DialogTransition>(StyleableProperties.DIALOG_TRANSITION, JFXDialog.this, "dialogTransition", DialogTransition.CENTER ); public DialogTransition getTransitionType(){ return transitionType == null ? 
DialogTransition.CENTER : transitionType.get(); } public StyleableObjectProperty<DialogTransition> transitionTypeProperty(){ return this.transitionType; } public void setTransitionType(DialogTransition transition){ this.transitionType.set(transition); } private static class StyleableProperties { private static final CssMetaData< JFXDialog, DialogTransition> DIALOG_TRANSITION = new CssMetaData< JFXDialog, DialogTransition>("-fx-dialog-transition", DialogTransitionConverter.getInstance(), DialogTransition.CENTER) { @Override public boolean isSettable(JFXDialog control) { return control.transitionType == null || !control.transitionType.isBound(); } @Override public StyleableProperty<DialogTransition> getStyleableProperty(JFXDialog control) { return control.transitionTypeProperty(); } }; private static final List<CssMetaData<? extends Styleable, ?>> CHILD_STYLEABLES; static { final List<CssMetaData<? extends Styleable, ?>> styleables = new ArrayList<CssMetaData<? extends Styleable, ?>>(Parent.getClassCssMetaData()); Collections.addAll(styleables, DIALOG_TRANSITION ); CHILD_STYLEABLES = Collections.unmodifiableList(styleables); } } // inherit the styleable properties from parent private List<CssMetaData<? extends Styleable, ?>> STYLEABLES; @Override public List<CssMetaData<? extends Styleable, ?>> getCssMetaData() { if(STYLEABLES == null){ final List<CssMetaData<? extends Styleable, ?>> styleables = new ArrayList<CssMetaData<? extends Styleable, ?>>(Parent.getClassCssMetaData()); styleables.addAll(getClassCssMetaData()); styleables.addAll(super.getClassCssMetaData()); STYLEABLES = Collections.unmodifiableList(styleables); } return STYLEABLES; } public static List<CssMetaData<? extends Styleable, ?>> getClassCssMetaData() { return StyleableProperties.CHILD_STYLEABLES; } /*************************************************************************** * * * Custom Events * * * **************************************************************************/ private ObjectProperty<EventHandler<? super JFXDialogEvent>> onDialogClosedProperty = new SimpleObjectProperty<>((closed)->{}); public void setOnDialogClosed(EventHandler<? super JFXDialogEvent> handler){ onDialogClosedProperty.set(handler); } public void getOnDialogClosed(EventHandler<? super JFXDialogEvent> handler){ onDialogClosedProperty.get(); } private ObjectProperty<EventHandler<? super JFXDialogEvent>> onDialogOpenedProperty = new SimpleObjectProperty<>((opened)->{}); public void setOnDialogOpened(EventHandler<? super JFXDialogEvent> handler){ onDialogOpenedProperty.set(handler); } public void getOnDialogOpened(EventHandler<? super JFXDialogEvent> handler){ onDialogOpenedProperty.get(); } }
src/com/jfoenix/controls/JFXDialog.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package com.jfoenix.controls; import java.util.ArrayList; import java.util.Collections; import java.util.List; import javafx.animation.Interpolator; import javafx.animation.KeyFrame; import javafx.animation.KeyValue; import javafx.animation.Timeline; import javafx.animation.Transition; import javafx.beans.DefaultProperty; import javafx.beans.property.BooleanProperty; import javafx.beans.property.ObjectProperty; import javafx.beans.property.SimpleBooleanProperty; import javafx.beans.property.SimpleObjectProperty; import javafx.css.CssMetaData; import javafx.css.SimpleStyleableObjectProperty; import javafx.css.Styleable; import javafx.css.StyleableObjectProperty; import javafx.css.StyleableProperty; import javafx.event.EventHandler; import javafx.geometry.Pos; import javafx.scene.Parent; import javafx.scene.input.MouseEvent; import javafx.scene.layout.Background; import javafx.scene.layout.BackgroundFill; import javafx.scene.layout.CornerRadii; import javafx.scene.layout.Pane; import javafx.scene.layout.Region; import javafx.scene.layout.StackPane; import javafx.scene.paint.Color; import javafx.util.Duration; import com.jfoenix.controls.events.JFXDialogEvent; import com.jfoenix.converters.DialogTransitionConverter; import com.jfoenix.effects.JFXDepthManager; import com.jfoenix.transitions.CachedTransition; /** * @author Shadi Shaheen * */ @DefaultProperty(value="content") public class JFXDialog extends StackPane { // public static enum JFXDialogLayout{PLAIN, HEADING, ACTIONS, BACKDROP}; public static enum DialogTransition{CENTER, TOP, RIGHT, BOTTOM, LEFT}; private StackPane contentHolder; private StackPane overlayPane; private double offsetX = 0; private double offsetY = 0; private Pane dialogContainer; private Region content; private Transition animation; private BooleanProperty overlayClose = new SimpleBooleanProperty(true); EventHandler<? 
super MouseEvent> closeHandler = (e)->close(); public JFXDialog(){ this(null,null,DialogTransition.CENTER); } /** * creates JFX Dialog control * @param dialogContainer * @param content * @param transitionType */ public JFXDialog(Pane dialogContainer, Region content, DialogTransition transitionType) { initialize(); setContent(content); setDialogContainer(dialogContainer); this.transitionType.set(transitionType); // init change listeners initChangeListeners(); } /** * creates JFX Dialog control * @param dialogContainer * @param content * @param transitionType * @param overlayClose */ public JFXDialog(Pane dialogContainer, Region content, DialogTransition transitionType, boolean overlayClose) { initialize(); setOverlayClose(overlayClose); setContent(content); setDialogContainer(dialogContainer); this.transitionType.set(transitionType); // init change listeners initChangeListeners(); } private void initChangeListeners(){ overlayCloseProperty().addListener((o,oldVal,newVal)->{ if(overlayPane!=null){ if(newVal) overlayPane.addEventHandler(MouseEvent.MOUSE_PRESSED, closeHandler); else overlayPane.removeEventHandler(MouseEvent.MOUSE_PRESSED, closeHandler); } }); } private void initialize() { this.setVisible(false); this.getStyleClass().add(DEFAULT_STYLE_CLASS); contentHolder = new StackPane(); contentHolder.setBackground(new Background(new BackgroundFill(Color.WHITE, new CornerRadii(2), null))); JFXDepthManager.setDepth(contentHolder, 4); contentHolder.setPickOnBounds(false); // ensure stackpane is never resized beyond it's preferred size contentHolder.setMaxSize(Region.USE_PREF_SIZE, Region.USE_PREF_SIZE); overlayPane = new StackPane(); overlayPane.getChildren().add(contentHolder); overlayPane.getStyleClass().add("jfx-dialog-overlay-pane"); StackPane.setAlignment(contentHolder, Pos.CENTER); overlayPane.setVisible(false); overlayPane.setBackground(new Background(new BackgroundFill(Color.rgb(0, 0, 0, 0.1), null, null))); // close the dialog if clicked on the overlay pane if(overlayClose.get()) overlayPane.addEventHandler(MouseEvent.MOUSE_PRESSED, closeHandler); // prevent propagating the events to overlay pane contentHolder.addEventHandler(MouseEvent.ANY, (e)->e.consume()); } /*************************************************************************** * * * Setters / Getters * * * **************************************************************************/ public Pane getDialogContainer() { return dialogContainer; } public void setDialogContainer(Pane dialogContainer) { if(dialogContainer!=null){ this.dialogContainer = dialogContainer; if(this.getChildren().indexOf(overlayPane)==-1)this.getChildren().setAll(overlayPane); this.visibleProperty().unbind(); this.visibleProperty().bind(overlayPane.visibleProperty()); if(this.dialogContainer.getChildren().indexOf(this)==-1 || this.dialogContainer.getChildren().indexOf(this)!=this.dialogContainer.getChildren().size()-1){ this.dialogContainer.getChildren().remove(this); this.dialogContainer.getChildren().add(this); } // FIXME: need to be improved to consider only the parent boundary offsetX = (this.getParent().getBoundsInLocal().getWidth()); offsetY = (this.getParent().getBoundsInLocal().getHeight()); animation = getShowAnimation(transitionType.get()); } } public Region getContent() { return content; } public void setContent(Region content) { if(content!=null){ this.content = content; contentHolder.getChildren().add(content); } } public final BooleanProperty overlayCloseProperty() { return this.overlayClose; } public final boolean isOverlayClose() { return 
this.overlayCloseProperty().get(); } public final void setOverlayClose(final boolean overlayClose) { this.overlayCloseProperty().set(overlayClose); } /*************************************************************************** * * * Public API * * * **************************************************************************/ public void show(Pane dialogContainer){ this.setDialogContainer(dialogContainer); animation.play(); } public void show(){ this.setDialogContainer(dialogContainer); // animation = getShowAnimation(transitionType.get()); animation.play(); } public void close(){ animation.setRate(-1); animation.play(); animation.setOnFinished((e)->{ resetProperties(); }); onDialogClosedProperty.get().handle(new JFXDialogEvent(JFXDialogEvent.CLOSED)); } /*************************************************************************** * * * Transitions * * * **************************************************************************/ private Transition getShowAnimation(DialogTransition transitionType){ Transition animation = null; if(contentHolder!=null){ switch (transitionType) { case LEFT: contentHolder.setTranslateX(-offsetX); animation = new LeftTransition(); break; case RIGHT: contentHolder.setTranslateX(offsetX); animation = new RightTransition(); break; case TOP: contentHolder.setTranslateY(-offsetY); animation = new TopTransition(); break; case BOTTOM: contentHolder.setTranslateY(offsetY); animation = new BottomTransition(); break; default: contentHolder.setScaleX(0); contentHolder.setScaleY(0); animation = new CenterTransition(); break; } } if(animation!=null)animation.setOnFinished((finish)->onDialogOpenedProperty.get().handle(new JFXDialogEvent(JFXDialogEvent.OPENED))); return animation; } private void resetProperties(){ overlayPane.setVisible(false); contentHolder.setTranslateX(0); contentHolder.setTranslateY(0); contentHolder.setScaleX(1); contentHolder.setScaleY(1); } private class LeftTransition extends CachedTransition { public LeftTransition() { super(contentHolder, new Timeline( new KeyFrame(Duration.ZERO, new KeyValue(contentHolder.translateXProperty(), -offsetX ,Interpolator.EASE_BOTH), new KeyValue(overlayPane.visibleProperty(), false ,Interpolator.EASE_BOTH) ), new KeyFrame(Duration.millis(10), new KeyValue(overlayPane.visibleProperty(), true ,Interpolator.EASE_BOTH), new KeyValue(overlayPane.opacityProperty(), 0,Interpolator.EASE_BOTH) ), new KeyFrame(Duration.millis(1000), new KeyValue(contentHolder.translateXProperty(), 0,Interpolator.EASE_BOTH), new KeyValue(overlayPane.opacityProperty(), 1,Interpolator.EASE_BOTH) )) ); // reduce the number to increase the shifting , increase number to reduce shifting setCycleDuration(Duration.seconds(0.4)); setDelay(Duration.seconds(0)); } } private class RightTransition extends CachedTransition { public RightTransition() { super(contentHolder, new Timeline( new KeyFrame(Duration.ZERO, new KeyValue(contentHolder.translateXProperty(), offsetX ,Interpolator.EASE_BOTH), new KeyValue(overlayPane.visibleProperty(), false ,Interpolator.EASE_BOTH) ), new KeyFrame(Duration.millis(10), new KeyValue(overlayPane.visibleProperty(), true ,Interpolator.EASE_BOTH), new KeyValue(overlayPane.opacityProperty(), 0, Interpolator.EASE_BOTH) ), new KeyFrame(Duration.millis(1000), new KeyValue(contentHolder.translateXProperty(), 0,Interpolator.EASE_BOTH), new KeyValue(overlayPane.opacityProperty(), 1, Interpolator.EASE_BOTH))) ); // reduce the number to increase the shifting , increase number to reduce shifting setCycleDuration(Duration.seconds(0.4)); 
setDelay(Duration.seconds(0)); } } private class TopTransition extends CachedTransition { public TopTransition() { super(contentHolder, new Timeline( new KeyFrame(Duration.ZERO, new KeyValue(contentHolder.translateYProperty(), -offsetY ,Interpolator.EASE_BOTH), new KeyValue(overlayPane.visibleProperty(), false ,Interpolator.EASE_BOTH) ), new KeyFrame(Duration.millis(10), new KeyValue(overlayPane.visibleProperty(), true ,Interpolator.EASE_BOTH), new KeyValue(overlayPane.opacityProperty(), 0, Interpolator.EASE_BOTH) ), new KeyFrame(Duration.millis(1000), new KeyValue(contentHolder.translateYProperty(), 0,Interpolator.EASE_BOTH), new KeyValue(overlayPane.opacityProperty(), 1, Interpolator.EASE_BOTH))) ); // reduce the number to increase the shifting , increase number to reduce shifting setCycleDuration(Duration.seconds(0.4)); setDelay(Duration.seconds(0)); } } private class BottomTransition extends CachedTransition { public BottomTransition() { super(contentHolder, new Timeline( new KeyFrame(Duration.ZERO, new KeyValue(contentHolder.translateYProperty(), offsetY ,Interpolator.EASE_BOTH), new KeyValue(overlayPane.visibleProperty(), false ,Interpolator.EASE_BOTH) ), new KeyFrame(Duration.millis(10), new KeyValue(overlayPane.visibleProperty(), true ,Interpolator.EASE_BOTH), new KeyValue(overlayPane.opacityProperty(), 0, Interpolator.EASE_BOTH) ), new KeyFrame(Duration.millis(1000), new KeyValue(contentHolder.translateYProperty(), 0,Interpolator.EASE_BOTH), new KeyValue(overlayPane.opacityProperty(), 1, Interpolator.EASE_BOTH))) ); // reduce the number to increase the shifting , increase number to reduce shifting setCycleDuration(Duration.seconds(0.4)); setDelay(Duration.seconds(0)); } } private class CenterTransition extends CachedTransition { public CenterTransition() { super(contentHolder, new Timeline( new KeyFrame(Duration.ZERO, new KeyValue(contentHolder.scaleXProperty(), 0 ,Interpolator.EASE_BOTH), new KeyValue(contentHolder.scaleYProperty(), 0 ,Interpolator.EASE_BOTH), new KeyValue(overlayPane.visibleProperty(), false ,Interpolator.EASE_BOTH) ), new KeyFrame(Duration.millis(10), new KeyValue(overlayPane.visibleProperty(), true ,Interpolator.EASE_BOTH), new KeyValue(overlayPane.opacityProperty(), 0,Interpolator.EASE_BOTH) ), new KeyFrame(Duration.millis(1000), new KeyValue(contentHolder.scaleXProperty(), 1 ,Interpolator.EASE_BOTH), new KeyValue(contentHolder.scaleYProperty(), 1 ,Interpolator.EASE_BOTH), new KeyValue(overlayPane.opacityProperty(), 1, Interpolator.EASE_BOTH) )) ); // reduce the number to increase the shifting , increase number to reduce shifting setCycleDuration(Duration.seconds(0.4)); setDelay(Duration.seconds(0)); } } /*************************************************************************** * * * Stylesheet Handling * * * **************************************************************************/ private static final String DEFAULT_STYLE_CLASS = "jfx-dialog"; private StyleableObjectProperty<DialogTransition> transitionType = new SimpleStyleableObjectProperty<DialogTransition>(StyleableProperties.DIALOG_TRANSITION, JFXDialog.this, "dialogTransition", DialogTransition.CENTER ); public DialogTransition getTransitionType(){ return transitionType == null ? 
DialogTransition.CENTER : transitionType.get(); } public StyleableObjectProperty<DialogTransition> transitionTypeProperty(){ return this.transitionType; } public void setTransitionType(DialogTransition transition){ this.transitionType.set(transition); } private static class StyleableProperties { private static final CssMetaData< JFXDialog, DialogTransition> DIALOG_TRANSITION = new CssMetaData< JFXDialog, DialogTransition>("-fx-dialog-transition", DialogTransitionConverter.getInstance(), DialogTransition.CENTER) { @Override public boolean isSettable(JFXDialog control) { return control.transitionType == null || !control.transitionType.isBound(); } @Override public StyleableProperty<DialogTransition> getStyleableProperty(JFXDialog control) { return control.transitionTypeProperty(); } }; private static final List<CssMetaData<? extends Styleable, ?>> CHILD_STYLEABLES; static { final List<CssMetaData<? extends Styleable, ?>> styleables = new ArrayList<CssMetaData<? extends Styleable, ?>>(Parent.getClassCssMetaData()); Collections.addAll(styleables, DIALOG_TRANSITION ); CHILD_STYLEABLES = Collections.unmodifiableList(styleables); } } // inherit the styleable properties from parent private List<CssMetaData<? extends Styleable, ?>> STYLEABLES; @Override public List<CssMetaData<? extends Styleable, ?>> getCssMetaData() { if(STYLEABLES == null){ final List<CssMetaData<? extends Styleable, ?>> styleables = new ArrayList<CssMetaData<? extends Styleable, ?>>(Parent.getClassCssMetaData()); styleables.addAll(getClassCssMetaData()); styleables.addAll(super.getClassCssMetaData()); STYLEABLES = Collections.unmodifiableList(styleables); } return STYLEABLES; } public static List<CssMetaData<? extends Styleable, ?>> getClassCssMetaData() { return StyleableProperties.CHILD_STYLEABLES; } /*************************************************************************** * * * Custom Events * * * **************************************************************************/ private ObjectProperty<EventHandler<? super JFXDialogEvent>> onDialogClosedProperty = new SimpleObjectProperty<>((closed)->{}); public void setOnDialogClosed(EventHandler<? super JFXDialogEvent> handler){ onDialogClosedProperty.set(handler); } public void getOnDialogClosed(EventHandler<? super JFXDialogEvent> handler){ onDialogClosedProperty.get(); } private ObjectProperty<EventHandler<? super JFXDialogEvent>> onDialogOpenedProperty = new SimpleObjectProperty<>((opened)->{}); public void setOnDialogOpened(EventHandler<? super JFXDialogEvent> handler){ onDialogOpenedProperty.set(handler); } public void getOnDialogOpened(EventHandler<? super JFXDialogEvent> handler){ onDialogOpenedProperty.get(); } }
Note that JFXDialog requires the root to be stack pane to work properly
src/com/jfoenix/controls/JFXDialog.java
Note that JFXDialog requires the root to be stack pane to work properly
<ide><path>rc/com/jfoenix/controls/JFXDialog.java <ide> <ide> /** <ide> * @author Shadi Shaheen <del> * <add> * note that for JFXDialog to work properly the root node should <add> * be of type {@link StackPane} <ide> */ <ide> @DefaultProperty(value="content") <ide> public class JFXDialog extends StackPane {
Java
mit
ac3965447d9e9d1c066f89eafb67c558975b7965
0
appaloosa-store/appaloosa-android-tools
package appaloosa_store.com.appaloosa_android_tools.analytics.db; import android.content.ContentValues; import android.content.Context; import android.database.Cursor; import android.database.sqlite.SQLiteDatabase; import android.database.sqlite.SQLiteOpenHelper; import android.util.Pair; import com.google.gson.JsonArray; import com.google.gson.JsonParser; import java.util.ArrayList; import java.util.List; import appaloosa_store.com.appaloosa_android_tools.analytics.model.Event; public class AnalyticsDb extends SQLiteOpenHelper { private static final int DB_VERSION = 1; private static final String DB_NAME = "analytics"; private static final String TABLE_EVENT = "event"; private static final String CREATE_TABLE = "CREATE TABLE " + TABLE_EVENT + "(" + DBColumn.ID + " INTEGER PRIMARY KEY NOT NULL, " + DBColumn.EVENT + " TEXT);"; private SQLiteDatabase db; private final Object lock = new Object(); public AnalyticsDb(Context context) { super(context, DB_NAME, null, DB_VERSION); db = getWritableDatabase(); } @Override public void onCreate(SQLiteDatabase db) { db.execSQL(CREATE_TABLE); } @Override public void onUpgrade(SQLiteDatabase db, int oldVersion, int newVersion) { db.execSQL("DROP TABLE IF EXISTS " + TABLE_EVENT + ";"); onCreate(db); } public boolean insertEvent(Event event) { ContentValues value = new ContentValues(); value.put(DBColumn.EVENT.toString(), event.toJson().toString()); synchronized (lock) { return db.insert(TABLE_EVENT, null, value) != -1; } } public int countEvents() { Cursor c = db.rawQuery("SELECT COUNT(*) FROM " + TABLE_EVENT, null); if (c.moveToNext()) { return c.getInt(0); } c.close(); return 0; } public Pair<List<Integer>, JsonArray> getOldestEvents(int batchSize) { List<Integer> eventsIds = new ArrayList<>(); JsonArray events = new JsonArray(); JsonParser parser = new JsonParser(); synchronized (lock) { Cursor cursor = db.query(TABLE_EVENT, DBColumn.COLUMNS, null, null, null, null, DBColumn.ID.toString() + " ASC", batchSize + ""); while (cursor.moveToNext()) { events.add(parser.parse(cursor.getString(DBColumn.EVENT.getIndex()))); eventsIds.add(cursor.getInt(DBColumn.ID.getIndex())); } cursor.close(); } return new Pair<>(eventsIds, events); } public boolean deleteEvent(int id) { synchronized (lock) { return db.delete(TABLE_EVENT, DBColumn.ID.toString() + " = ?", new String[] {"" + id}) > 0; } } public boolean deleteEvents(List<Integer> ids) { boolean allDeleted = true; for (int id : ids) { allDeleted &= deleteEvent(id); } return allDeleted; } }
app/src/main/java/appaloosa_store/com/appaloosa_android_tools/analytics/db/AnalyticsDb.java
package appaloosa_store.com.appaloosa_android_tools.analytics.db; import android.content.ContentValues; import android.content.Context; import android.database.Cursor; import android.database.sqlite.SQLiteDatabase; import android.database.sqlite.SQLiteOpenHelper; import android.util.Pair; import com.google.gson.JsonArray; import com.google.gson.JsonParser; import java.util.ArrayList; import java.util.List; import appaloosa_store.com.appaloosa_android_tools.analytics.model.Event; public class AnalyticsDb extends SQLiteOpenHelper { private static final int DB_VERSION = 1; private static final String DB_NAME = "analytics"; private static final String TABLE_EVENT = "event"; private static final String CREATE_TABLE = "CREATE TABLE " + TABLE_EVENT + "(" + DBColumn.ID + " INTEGER PRIMARY KEY NOT NULL, " + DBColumn.EVENT + " TEXT);"; private SQLiteDatabase db; private final Object lock = new Object(); public AnalyticsDb(Context context) { super(context, DB_NAME, null, DB_VERSION); db = getWritableDatabase(); } @Override public void onCreate(SQLiteDatabase db) { db.execSQL(CREATE_TABLE); } @Override public void onUpgrade(SQLiteDatabase db, int oldVersion, int newVersion) { db.execSQL("DROP TABLE IF EXISTS " + TABLE_EVENT + ";"); onCreate(db); } public boolean insertEvent(Event event) { ContentValues value = new ContentValues(); value.put(DBColumn.EVENT.toString(), event.toJson().toString()); synchronized (lock) { return db.insert(TABLE_EVENT, null, value) != -1; } } public int countEvents() { Cursor c = db.rawQuery("SELECT COUNT(*) FROM " + TABLE_EVENT, null); if (c.moveToNext()) { return c.getInt(0); } return 0; } public Pair<List<Integer>, JsonArray> getOldestEvents(int batchSize) { List<Integer> eventsIds = new ArrayList<>(); JsonArray events = new JsonArray(); JsonParser parser = new JsonParser(); synchronized (lock) { Cursor cursor = db.query(TABLE_EVENT, DBColumn.COLUMNS, null, null, null, null, DBColumn.ID.toString() + " ASC", batchSize + ""); while (cursor.moveToNext()) { events.add(parser.parse(cursor.getString(DBColumn.EVENT.getIndex()))); eventsIds.add(cursor.getInt(DBColumn.ID.getIndex())); } cursor.close(); } return new Pair<>(eventsIds, events); } public boolean deleteEvent(int id) { synchronized (lock) { return db.delete(TABLE_EVENT, DBColumn.ID.toString() + " = ?", new String[] {"" + id}) > 0; } } public boolean deleteEvents(List<Integer> ids) { boolean allDeleted = true; for (int id : ids) { allDeleted &= deleteEvent(id); } return allDeleted; } }
Quick fix to AnalyticsDb
app/src/main/java/appaloosa_store/com/appaloosa_android_tools/analytics/db/AnalyticsDb.java
Quick fix to AnalyticsDb
<ide><path>app/src/main/java/appaloosa_store/com/appaloosa_android_tools/analytics/db/AnalyticsDb.java
<ide> if (c.moveToNext()) {
<ide> return c.getInt(0);
<ide> }
<add> c.close();
<ide> return 0;
<ide> }
<ide>
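Note on the diff above: the added c.close() only runs on the empty-result path; when moveToNext() succeeds, countEvents() still returns without closing the cursor. A minimal leak-free sketch using try/finally is shown below; it is illustrative only, not part of the recorded commit, and it assumes the same db and TABLE_EVENT members as the AnalyticsDb class above.

    // Hypothetical variant of countEvents() that always closes the cursor,
    // whether or not the query returned a row. Assumes the surrounding
    // AnalyticsDb class (db, TABLE_EVENT) shown above.
    public int countEvents() {
        Cursor c = db.rawQuery("SELECT COUNT(*) FROM " + TABLE_EVENT, null);
        try {
            if (c.moveToNext()) {
                return c.getInt(0);
            }
            return 0;
        } finally {
            c.close();
        }
    }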
JavaScript
mit
24cbd0eba4521ab8cbe32361321187e447d66a79
0
SavageCore/yadg-pth-userscript,SavageCore/yadg-pth-userscript
// ==UserScript== // @id pth-yadg // @name PTH YADG // @description This script provides integration with online description generator YADG (http://yadg.cc) - Credit to Slack06 // @license https://github.com/SavageCore/yadg-pth-userscript/blob/master/LICENSE // @version 1.3.10 // @namespace yadg // @grant GM_xmlhttpRequest // @require https://yadg.cc/static/js/jsandbox.min.js // @include http*://*passtheheadphones.me/upload.php* // @include http*://*passtheheadphones.me/requests.php* // @include http*://*passtheheadphones.me/torrents.php* // @include http*://*waffles.ch/upload.php* // @include http*://*waffles.ch/requests.php* // @downloadURL https://github.com/SavageCore/yadg-pth-userscript/raw/master/pth_yadg.user.js // ==/UserScript== // --------- USER SETTINGS START --------- /* Here you can set site specific default templates. You can find a list of available templates at: https://yadg.cc/api/v2/templates/ */ var defaultPTHFormat = 5, defaultWafflesFormat = 9; // --------- USER SETTINGS END --------- // --------- THIRD PARTY CODE AREA START --------- // // Creates an object which gives some helper methods to // Save/Load/Remove data to/from the localStorage // // Source from: https://github.com/gergob/localstoragewrapper // function LocalStorageWrapper (applicationPrefix) { "use strict"; if(applicationPrefix == undefined) { throw new Error('applicationPrefix parameter should be defined'); } var delimiter = '_'; //if the passed in value for prefix is not string, it should be converted var keyPrefix = typeof(applicationPrefix) === 'string' ? applicationPrefix : JSON.stringify(applicationPrefix); var localStorage = window.localStorage||unsafeWindow.localStorage; var isLocalStorageAvailable = function() { return typeof(localStorage) != undefined }; var getKeyPrefix = function() { return keyPrefix; }; // // validates if there is a prefix defined for the keys // and checks if the localStorage functionality is available or not // var makeChecks = function(key) { var prefix = getKeyPrefix(); if(prefix == undefined) { throw new Error('No prefix was defined, data cannot be saved'); } if(!isLocalStorageAvailable()) { throw new Error('LocalStorage is not supported by your browser, data cannot be saved'); } //keys are always strings var checkedKey = typeof(key) === 'string' ? 
key : JSON.stringify(key); return checkedKey; }; // // saves the value associated to the key into the localStorage // var addItem = function(key, value) { var that = this; try{ var checkedKey = makeChecks(key); var combinedKey = that.getKeyPrefix() + delimiter + checkedKey; localStorage.setItem(combinedKey, JSON.stringify(value)); } catch(error) { console.log(error); throw error; } }; // // gets the value of the object saved to the key passed as parameter // var getItem = function(key) { var that = this; var result = undefined; try{ var checkedKey = makeChecks(key); var combinedKey = that.getKeyPrefix() + delimiter + checkedKey; var resultAsJSON = localStorage.getItem(combinedKey); result = JSON.parse(resultAsJSON); } catch(error) { console.log(error); throw error; } return result; }; // // returns all the keys from the localStorage // var getAllKeys = function() { var prefix = getKeyPrefix(); var results = []; if(prefix == undefined) { throw new Error('No prefix was defined, data cannot be saved'); } if(!isLocalStorageAvailable()) { throw new Error('LocalStorage is not supported by your browser, data cannot be saved'); } for(var key in localStorage) { if(key.indexOf(prefix) == 0) { var keyParts = key.split(delimiter); results.push(keyParts[1]); } } return results; }; // // removes the value associated to the key from the localStorage // var removeItem = function(key) { var that = this; var result = false; try{ var checkedKey = makeChecks(key); var combinedKey = that.getKeyPrefix() + delimiter + checkedKey; localStorage.removeItem(combinedKey); result = true; } catch(error) { console.log(error); throw error; } return result; }; // // removes all the values from the localStorage // var removeAll = function() { var that = this; try{ var allKeys = that.getAllKeys(); for(var i=0; i < allKeys.length; ++i) { var checkedKey = makeChecks(allKeys[i]); var combinedKey = that.getKeyPrefix() + delimiter + checkedKey; localStorage.removeItem(combinedKey); } } catch(error) { console.log(error); throw error; } }; // make some of the functionalities public return { isLocalStorageAvailable : isLocalStorageAvailable, getKeyPrefix : getKeyPrefix, addItem : addItem, getItem : getItem, getAllKeys : getAllKeys, removeItem : removeItem, removeAll : removeAll } } // --------- THIRD PARTY CODE AREA END --------- var yadg_util = { exec : function exec(fn) { var script = document.createElement('script'); script.setAttribute("type", "application/javascript"); script.textContent = '(' + fn + ')();'; document.body.appendChild(script); // run the script document.body.removeChild(script); // clean up }, // handle for updating page css, taken from one of hateradio's scripts addCSS : function(style) { if(!this.style) { this.style = document.createElement('style'); this.style.type = 'text/css'; (document.head || document.getElementsByTagName('head')[0]).appendChild(this.style); } this.style.appendChild(document.createTextNode(style+'\n')); }, setValueIfSet: function(value,input,cond) { if (cond) { input.value = value; } else { input.value = ''; } }, // negative count will remove, positive count will add given number of artist boxes addRemoveArtistBoxes : function(count) { if (count != 0) { if (count < 0) { for (var i = 0; i < -count; i++) { yadg_util.exec(function() {RemoveArtistField()}); } } else { for (var i = 0; i < count; i++) { yadg_util.exec(function() {AddArtistField()}); } } } }, getOptionOffsets : function(select) { var option_offsets = {}; for (var j = 0; j < select.options.length; j++) { 
option_offsets[select.options[j].value] = select.options[j].index; } return option_offsets; }, storage : new LocalStorageWrapper("yadg"), settings : new LocalStorageWrapper("yadgSettings") }; // very simple wrapper for XmlHttpRequest function requester(url, method, callback, data, error_callback) { this.data = data; this.url = url; this.method = method; if (!error_callback) { error_callback = yadg.failed_callback; } this.send = function() { var details = { url : this.url, method : this.method, onload : function(response) { if (response.status === 200) { callback(JSON.parse(response.responseText)); } else if (response.status === 401) { yadg.failed_authentication_callback(); } else { error_callback(); } }, onerror : error_callback }; if (method == "POST") { details.data = JSON.stringify(this.data); } var headers = { "Accept" : "application/json", "Content-Type" : "application/json" }; if (yadg_util.settings.getItem(factory.KEY_API_TOKEN)) { headers.Authorization = "Token " + yadg_util.settings.getItem(factory.KEY_API_TOKEN); } details.headers = headers; GM_xmlhttpRequest(details); }; } var yadg_sandbox = { KEY_LAST_WARNING : "templateLastWarning", init : function(callback) { GM_xmlhttpRequest({ method: 'GET', url: yadg.yadgHost + '/static/js/jsandbox-worker.js', onload: function(response) { var script, dataURL = null; if (response.status === 200) { script = response.responseText; var blob = new Blob([script], {type: 'application/javascript'}); var URL = window.URL || window.webkitURL; if (!URL || !URL.createObjectURL) { throw new Error('No no valid implementation of window.URL.createObjectURL found.'); } dataURL = URL.createObjectURL(blob); yadg_sandbox.initCallback(dataURL); yadg_sandbox.loadSwig(callback); } else { yadg_sandbox.initCallbackError(); } }, onerror: function() { yadg_sandbox.initCallbackError(); } }); }, loadSwig : function(callback) { // importScripts for the web worker will not work in Firefox with cross-domain requests // see: https://bugzilla.mozilla.org/show_bug.cgi?id=756589 // so download the Swig files manually with GM_xmlhttpRequest GM_xmlhttpRequest({ method: 'GET', url: yadg.yadgHost + "/static/js/swig.min.js", onload: function(response) { if (response.status === 200) { yadg_sandbox.swig_script = response.responseText; GM_xmlhttpRequest({ method: 'GET', url: yadg.yadgHost + "/static/js/swig.custom.js", onload: function(response) { if (response.status === 200) { yadg_sandbox.swig_custom_script = response.responseText; callback(); } } }); } } }); }, initializeSwig : function(dependencies) { if (!(this.swig_script && this.swig_custom_script)) { yadg.failed_callback(); return } yadg_sandbox.exec({data: this.swig_script, onerror: yadg.failed_callback}); yadg_sandbox.exec({data: this.swig_custom_script, onerror: yadg.failed_callback}); yadg_sandbox.exec({data: "var myswig = new swig.Swig({ loader: swig.loaders.memory(input.templates), autoescape: false }), i=0; yadg_filters.register_filters(myswig);", input: {templates: dependencies}}); }, renderTemplate : function(template, data, callback, error) { var eval_string = "myswig.render(input.template, { locals: input.data, filename: 'scratchpad' + (i++) })"; this.eval({data: eval_string, callback: function(out) {callback(out);}, input: {template: template, data: data}, onerror: function(err){error(err);}}); }, initCallback : function(dataUrl) { JSandbox.url = dataUrl; this.jsandbox = new JSandbox(); this.initError = false; }, resetSandbox : function() { this.jsandbox.terminate(); this.jsandbox = new JSandbox(); }, load : 
function(options) { this.jsandbox.load(options); }, exec : function(options) { this.jsandbox.exec(options); }, eval : function(options) { this.jsandbox.eval(options); }, initCallbackError : function() { this.initError = true; var last_warning = yadg_util.storage.getItem(this.KEY_LAST_WARNING), now = new Date(); if (last_warning === null || now.getTime() - (new Date(last_warning)).getTime() > factory.CACHE_TIMEOUT) { alert("Could not load the necessary script files for executing YADG. If this error persists you might need to update the user script. You will only get this message once a day."); yadg_util.storage.addItem(this.KEY_LAST_WARNING, now); } } }; var factory = { // storage keys for cache KEY_LAST_CHECKED : "lastChecked", KEY_SCRAPER_LIST : "scraperList", KEY_FORMAT_LIST : "formatList", // storage keys for settings KEY_API_TOKEN : "apiToken", KEY_DEFAULT_TEMPLATE : "defaultTemplate", KEY_DEFAULT_SCRAPER : "defaultScraper", KEY_REPLACE_DESCRIPTION : "replaceDescriptionOn", KEY_SETTINGS_INIT_VER : "settingsInitializedVer", CACHE_TIMEOUT : 1000*60*60*24, // 24 hours UPDATE_PROGRESS : 0, locations : new Array( { name : 'pth_upload', regex : /http(s)?\:\/\/(.*\.)?passtheheadphones\.me\/upload\.php.*/i }, { name : 'pth_edit', regex : /http(s)?\:\/\/(.*\.)?passtheheadphones\.me\/torrents\.php\?action=editgroup&groupid=.*/i }, { name : 'pth_request', regex : /http(s)?\:\/\/(.*\.)?passtheheadphones\.me\/requests\.php\?action=new/i }, { name : 'pth_request_edit', regex : /http(s)?\:\/\/(.*\.)?passtheheadphones\.me\/requests\.php\?action=edit&id=.*/i }, { name : 'pth_torrent_overview', regex : /http(s)?\:\/\/(.*\.)?passtheheadphones\.me\/torrents\.php\?id=.*/i }, { name : 'waffles_upload', // regex : /http(s)?\:\/\/(.*\.)?waffles\.ch\/upload\.php\?legacy=1.*/i // }, // { // TODO: reenable support for the new Waffles upload page once it is reactivated // name : 'waffles_upload_new', regex : /http(s)?\:\/\/(.*\.)?waffles\.ch\/upload\.php.*/i }, { name : 'waffles_request', regex : /http(s)?\:\/\/(.*\.)?waffles\.ch\/requests\.php\?do=add/i } ), determineLocation : function(uri) { for (var i = 0; i < this.locations.length; i++) { if (this.locations[i].regex.test(uri)) { return this.locations[i].name; } } return null; }, init : function() { this.currentLocation = this.determineLocation(document.URL); // only continue with the initialization if we found a valid location if (this.currentLocation !== null) { this.insertIntoPage(this.getInputElements()); // set the necessary styles this.setStyles(); // make sure we initialize the settings to the most recent version this.initializeSettings(); // populate settings inputs this.populateSettings(); // add the appropriate action for the button var button = document.getElementById('yadg_submit'); button.addEventListener('click', function (e) { e.preventDefault(); yadg.makeRequest(); }, false); // add the action for the options toggle var toggleLink = document.getElementById('yadg_toggle_options'); if (toggleLink !== null) { toggleLink.addEventListener('click', function (e) { e.preventDefault(); var optionsDiv = document.getElementById('yadg_options'), display = optionsDiv.style.display; if (display == 'none' || display == '') { optionsDiv.style.display = 'block'; } else { optionsDiv.style.display = 'none'; } }); } // add the action for the template select var formatSelect = this.getFormatSelect(); if (formatSelect !== null) { formatSelect.addEventListener('change', function (e) { if (yadg_renderer.hasCached()) { yadg_renderer.renderCached(this.value, 
factory.setDescriptionBoxValue, factory.setDescriptionBoxValue); } }); } // add the action to the save settings link var saveSettingsLink = document.getElementById('yadg_save_settings'); if (saveSettingsLink !== null) { saveSettingsLink.addEventListener('click', function (e) { e.preventDefault(); factory.saveSettings(); alert("Settings saved successfully."); }); } // add the action to the clear cache link var clearCacheLink = document.getElementById('yadg_clear_cache'); if (clearCacheLink !== null) { clearCacheLink.addEventListener('click', function (e) { e.preventDefault(); yadg_util.storage.removeAll(); alert("Cache cleared. Please reload the page for this to take effect."); }); } var last_checked = yadg_util.storage.getItem(factory.KEY_LAST_CHECKED); if (last_checked === null || (new Date()).getTime() - (new Date(last_checked)).getTime() > factory.CACHE_TIMEOUT) { // update the scraper and formats list factory.UPDATE_PROGRESS = 1; yadg.getScraperList(factory.setScraperSelect); yadg.getFormatsList(factory.setFormatSelect); } else { factory.setScraperSelect(yadg_util.storage.getItem(factory.KEY_SCRAPER_LIST)); factory.setFormatSelect(yadg_util.storage.getItem(factory.KEY_FORMAT_LIST)); } return true; } else { return false; } }, getApiTokenInput : function() { return document.getElementById('yadg_api_token'); }, getReplaceDescriptionCheckbox : function() { return document.getElementById('yadg_options_replace'); }, getReplaceDescriptionSettingKey : function() { return this.makeReplaceDescriptionSettingsKey(this.currentLocation); }, makeReplaceDescriptionSettingsKey : function(subKey) { return this.KEY_REPLACE_DESCRIPTION + subKey.replace(/_/g, ""); }, initializeSettings : function() { var settings_ver = yadg_util.settings.getItem(factory.KEY_SETTINGS_INIT_VER), current_ver = 1; if (!settings_ver) { settings_ver = 0; } if (settings_ver < current_ver) { // replace descriptions on upload and new request pages var locations = [ 'pth_upload', 'pth_request', 'waffles_upload', 'waffles_upload_new', 'waffles_request' ]; for (var i = 0; i < locations.length; i++) { var loc = locations[i], replace_desc_setting_key = factory.makeReplaceDescriptionSettingsKey(loc); yadg_util.settings.addItem(replace_desc_setting_key, true); } } yadg_util.settings.addItem(factory.KEY_SETTINGS_INIT_VER, current_ver); }, populateSettings : function() { var api_token = yadg_util.settings.getItem(factory.KEY_API_TOKEN), replace_desc = yadg_util.settings.getItem(factory.getReplaceDescriptionSettingKey()); if (api_token) { var api_token_input = factory.getApiTokenInput(); api_token_input.value = api_token; } if (replace_desc) { var replace_desc_checkbox = factory.getReplaceDescriptionCheckbox(); replace_desc_checkbox.checked = true; } }, saveSettings : function() { var scraper_select = factory.getScraperSelect(), template_select = factory.getFormatSelect(), api_token_input = factory.getApiTokenInput(), replace_desc_checkbox = factory.getReplaceDescriptionCheckbox(); var current_scraper = null, current_template = null, api_token = api_token_input.value.trim(), replace_description = replace_desc_checkbox.checked; if (scraper_select.options.length > 0) { current_scraper = scraper_select.options[scraper_select.selectedIndex].value; } if (template_select.options.length > 0) { current_template = template_select.options[template_select.selectedIndex].value; } if (current_scraper !== null) { yadg_util.settings.addItem(factory.KEY_DEFAULT_SCRAPER, current_scraper); } if (current_template !== null) { 
yadg_util.settings.addItem(factory.KEY_DEFAULT_TEMPLATE, current_template); } if (api_token !== "") { yadg_util.settings.addItem(factory.KEY_API_TOKEN, api_token); } else { yadg_util.settings.removeItem(factory.KEY_API_TOKEN); } var replace_desc_setting_key = factory.getReplaceDescriptionSettingKey(); if (replace_description) { yadg_util.settings.addItem(replace_desc_setting_key, true); } else { yadg_util.settings.removeItem(replace_desc_setting_key); } }, setDescriptionBoxValue : function(value) { var desc_box = factory.getDescriptionBox(), replace_desc_checkbox = factory.getReplaceDescriptionCheckbox(), replace_desc = false; if (replace_desc_checkbox !== null) { replace_desc = replace_desc_checkbox.checked; } if (desc_box !== null) { if (!replace_desc && /\S/.test(desc_box.value)) { // check if the current description contains more than whitespace desc_box.value += "\n\n" + value; } else { desc_box.value = value; } } }, getFormatSelect : function() { return document.getElementById('yadg_format'); }, setDefaultFormat : function() { var format_select = factory.getFormatSelect(); var format_offsets = yadg_util.getOptionOffsets(format_select); var default_format = yadg_util.settings.getItem(factory.KEY_DEFAULT_TEMPLATE); if (default_format !== null && default_format in format_offsets) { format_select.selectedIndex = format_offsets[default_format]; } else { // we have no settings so fall back to the hard coded defaults switch (this.currentLocation) { case "waffles_upload": case "waffles_upload_new": case "waffles_request": format_select.selectedIndex = format_offsets[defaultWafflesFormat]; break; default: format_select.selectedIndex = format_offsets[defaultPTHFormat]; break; } } }, getScraperSelect : function() { return document.getElementById("yadg_scraper"); }, setDefaultScraper : function() { var default_scraper = yadg_util.settings.getItem(factory.KEY_DEFAULT_SCRAPER); if (default_scraper !== null) { var scraper_select = factory.getScraperSelect(); var scraper_offsets = yadg_util.getOptionOffsets(scraper_select); if (default_scraper in scraper_offsets) { scraper_select.selectedIndex = scraper_offsets[default_scraper]; } } }, setScraperSelect : function(scrapers) { var scraper_select = factory.getScraperSelect(); factory.setSelect(scraper_select, scrapers); factory.setDefaultScraper(); if (factory.UPDATE_PROGRESS > 0) { yadg_util.storage.addItem(factory.KEY_SCRAPER_LIST, scrapers); factory.UPDATE_PROGRESS |= 1<<1; if (factory.UPDATE_PROGRESS === 7) { yadg_util.storage.addItem(factory.KEY_LAST_CHECKED, new Date()); } } }, setFormatSelect : function(templates) { var format_select = factory.getFormatSelect(); var non_utility = []; var save_templates = []; for (var i = 0; i < templates.length; i++) { if (factory.UPDATE_PROGRESS > 0) { yadg_templates.addTemplate(templates[i]); save_templates.push({ id : templates[i]['id'], url : templates[i]['url'], name : templates[i]['name'], nameFormatted : templates[i]['nameFormatted'], owner : templates[i]['owner'], default : templates[i]['default'], isUtility : templates[i]['isUtility'] }); } else { yadg_templates.addTemplateUrl(templates[i]['id'], templates[i]['url']); } if (!templates[i]['isUtility']) { non_utility.push(templates[i]); } } factory.setSelect(format_select, non_utility); factory.setDefaultFormat(); if (factory.UPDATE_PROGRESS > 0) { yadg_util.storage.addItem(factory.KEY_FORMAT_LIST, save_templates); factory.UPDATE_PROGRESS |= 1<<2; if (factory.UPDATE_PROGRESS === 7) { yadg_util.storage.addItem(factory.KEY_LAST_CHECKED, new Date()); } } }, 
setSelect : function(select, data) { select.options.length = data.length; for (var i = 0; i < data.length; i++) { // we are not using the javascript constructor to create an Option instance because this will create an // incompatibility with jQuery in Chrome which will make it impossible to add a new artist field on passtheheadphones.me var o = document.createElement("option"); if ('nameFormatted' in data[i]) { o.text = data[i]['nameFormatted']; } else { o.text = data[i]['name']; } o.value = data[i]['value'] || data[i]['id']; o.selected = data[i]['default']; select.options[i] = o; if (data[i]['default']) { select.selectedIndex = i; } if (data[i]['url']) { o.setAttribute('data-url', data[i]['url']); } } }, setStyles : function() { // general styles yadg_util.addCSS('div#yadg_options{ display:none; margin-top:3px; } input#yadg_input,input#yadg_submit,label#yadg_format_label,a#yadg_scraper_info { margin-right: 5px } div#yadg_response { margin-top:3px; } select#yadg_scraper { margin-right: 2px } #yadg_options_template,#yadg_options_api_token,#yadg_options_replace_div { margin-bottom: 3px; } .add_form[name="yadg"] input,.add_form[name="yadg"] select { width: 90%; margin: 2px 0 !important; } div.box { border:none !important }'); // location specific styles will go here switch(this.currentLocation) { case "waffles_upload": yadg_util.addCSS('div#yadg_response ul { margin-left: 0 !important; padding-left: 0 !important; }'); break; case "waffles_request": yadg_util.addCSS('div#yadg_response ul { margin-left: 0 !important; padding-left: 0 !important; }'); break; default: break; } }, getInputElements : function() { var buttonHTML = '<input type="submit" value="Fetch" id="yadg_submit"/>', scraperSelectHTML = '<select name="yadg_scraper" id="yadg_scraper"></select>', optionsHTML = '<div id="yadg_options"><div id="yadg_options_template"><label for="yadg_format" id="yadg_format_label">Template:</label><select name="yadg_format" id="yadg_format"></select></div><div id="yadg_options_api_token"><label for="yadg_api_token" id="yadg_api_token_label">API token (<a href="https://yadg.cc/api/token" target="_blank">Get one here</a>):</label> <input type="text" name="yadg_api_token" id="yadg_api_token" size="50" /></div><div id="yadg_options_replace_div"><input type="checkbox" name="yadg_options_replace" id="yadg_options_replace" /> <label for="yadg_options_replace" id="yadg_options_replace_label">Replace descriptions on this page</label></div><div id="yadg_options_links"><a id="yadg_save_settings" href="#" title="Save the currently selected scraper and template as default for this site and save the given API token.">Save settings</a> <span class="yadg_separator">|</span> <a id="yadg_clear_cache" href="#">Clear cache</a></div></div>', inputHTML = '<input type="text" name="yadg_input" id="yadg_input" size="60" />', responseDivHTML = '<div id="yadg_response"></div>', toggleOptionsLinkHTML = '<a id="yadg_toggle_options" href="#">Toggle options</a>', scraperInfoLink = '<a id="yadg_scraper_info" href="https://yadg.cc/available-scrapers" target="_blank" title="Get additional information on the available scrapers">[?]</a>'; switch (this.currentLocation) { case "pth_upload": var tr = document.createElement('tr'); tr.className = "yadg_tr"; tr.innerHTML = '<td class="label">YADG:</td><td>' + inputHTML + scraperSelectHTML + scraperInfoLink + buttonHTML + toggleOptionsLinkHTML + optionsHTML + responseDivHTML + '</td>'; return tr; case "pth_edit": var div = document.createElement('div'); div.className = "yadg_div"; 
div.innerHTML = '<h3 class="label">YADG:</h3>\n' + inputHTML + '\n' + scraperSelectHTML + '\n' + scraperInfoLink + '\n' + buttonHTML + '\n' + toggleOptionsLinkHTML + '\n' + optionsHTML + '\n' + responseDivHTML; return div; case "pth_torrent_overview": var div = document.createElement('div'); div.id = 'yadg_div' div.className = 'box'; div.innerHTML = '<div class="head"><strong>YADG</strong></div>\n<div class="body">\n<form class="add_form" name="yadg" method="post">\n<input type="text" name="yadg_input" id="yadg_input" />\n' + scraperSelectHTML + '\n' + scraperInfoLink + '\n' + buttonHTML + '\n' + toggleOptionsLinkHTML + '\n' + optionsHTML + '\n' + responseDivHTML; return div; case "pth_request": case "pth_request_edit": var tr = document.createElement('tr'); tr.className = "yadg_tr"; tr.innerHTML = '<td class="label">YADG:</td><td>' + inputHTML + scraperSelectHTML + scraperInfoLink + buttonHTML + toggleOptionsLinkHTML + optionsHTML + responseDivHTML + '</td>'; return tr; case "waffles_upload": var tr = document.createElement('tr'); tr.className = "yadg_tr"; tr.innerHTML = '<td class="heading" valign="top" align="right"><label for="yadg_input">YADG:</label></td><td>' + inputHTML + scraperSelectHTML + scraperInfoLink + buttonHTML + toggleOptionsLinkHTML + optionsHTML + responseDivHTML + '</td>'; return tr; case "waffles_upload_new": var p = document.createElement('p'); p.className = "yadg_p"; p.innerHTML = '<label for="yadg_input">YADG:</label>' + inputHTML + scraperSelectHTML + scraperInfoLink + buttonHTML + toggleOptionsLinkHTML + optionsHTML + responseDivHTML; return p; case "waffles_request": var tr = document.createElement('tr'); tr.className = "yadg_tr"; tr.innerHTML = '<td style="text-align:left;width:100px;">YADG:</td><td style="text-align:left;">' + inputHTML + scraperSelectHTML + scraperInfoLink + buttonHTML + toggleOptionsLinkHTML + optionsHTML + responseDivHTML + '</td>'; return tr; default: // that should actually never happen return document.createElement('div'); } }, insertIntoPage : function(element) { switch (this.currentLocation) { case "pth_upload": var year_tr = document.getElementById('year_tr'); year_tr.parentNode.insertBefore(element,year_tr); break; case "pth_edit": var summary_input = document.getElementsByName('summary')[0]; summary_input.parentNode.insertBefore(element,summary_input.nextSibling.nextSibling); break; case "pth_torrent_overview": var add_artists_box = document.getElementsByClassName("box_addartists")[0]; add_artists_box.appendChild(element); break; case "pth_request": case "pth_request_edit": var artist_tr = document.getElementById('artist_tr'); artist_tr.parentNode.insertBefore(element,artist_tr); break; case "waffles_upload": var submit_button = document.getElementsByName('submit')[0]; submit_button.parentNode.parentNode.parentNode.insertBefore(element,submit_button.parentNode.parentNode); break; case "waffles_upload_new": var h4s = document.getElementsByTagName('h4'); var div; for (var i=0; i < h4s.length; i++) { if (h4s[i].innerHTML.indexOf('read the rules') !== -1) { div = h4s[i].parentNode; break; } } div.appendChild(element); break; case "waffles_request": var category_select = document.getElementsByName('category')[0]; category_select.parentNode.parentNode.parentNode.insertBefore(element,category_select.parentNode.parentNode); break; default: break; } }, getDescriptionBox : function() { switch (this.currentLocation) { case "pth_upload": return document.getElementById('album_desc'); case "pth_edit": return document.getElementsByName('body')[0]; 
case "pth_torrent_overview": if (!this.hasOwnProperty("dummybox")) { this.dummybox = document.createElement('div'); } return this.dummybox; case "pth_request": case "pth_request_edit": return document.getElementsByName('description')[0]; case "waffles_upload": return document.getElementById('descr'); case "waffles_upload_new": return document.getElementById('id_descr'); case "waffles_request": return document.getElementsByName('information')[0]; default: // that should actually never happen return document.createElement('div'); } }, getFormFillFunction : function() { switch (this.currentLocation) { case "pth_upload": var f = function(rawData) { var artist_inputs = document.getElementsByName("artists[]"), album_title_input = document.getElementById("title"), year_input = document.getElementById("year"), label_input = document.getElementById("record_label"), catalog_input = document.getElementById("catalogue_number"), tags_input = document.getElementById("tags"), data = yadg.prepareRawResponse(rawData); if (data.artists != false) { var input_idx = 0; yadg_util.addRemoveArtistBoxes(data.effective_artist_count - artist_inputs.length); artist_inputs = document.getElementsByName("artists[]"); for (var i = 0; i < data.artist_keys.length; i++) { var artist_key = data.artist_keys[i], artist_types = data.artists[artist_key]; for (var j = 0; j < artist_types.length; j++) { var artist_type = artist_types[j], artist_input = artist_inputs[input_idx], type_select = artist_input.nextSibling; while (type_select.tagName != 'SELECT') { type_select = type_select.nextSibling; } artist_input.value = artist_key; var option_offsets = yadg_util.getOptionOffsets(type_select); if (artist_type === "main") { type_select.selectedIndex = option_offsets[1]; } else if (artist_type === "guest") { type_select.selectedIndex = option_offsets[2]; } else if (artist_type === "remixer") { type_select.selectedIndex = option_offsets[3]; } else { // we don't know this artist type, default to "main" type_select.selectedIndex = option_offsets[1]; } // next artist input input_idx += 1; } } } else { for (var i = 0; i < artist_inputs.length; i++) { artist_inputs[i].value = ''; } } if (data.tags != false) { tags_input.value = data.tag_string.toLowerCase(); } else { tags_input.value = ''; } yadg_util.setValueIfSet(data.year,year_input,data.year != false); yadg_util.setValueIfSet(data.title,album_title_input,data.title != false); yadg_util.setValueIfSet(data.label,label_input,data.label != false); yadg_util.setValueIfSet(data.catalog,catalog_input,data.catalog != false); }; return f; case "pth_edit": f = function(rawData) { var summary_input = document.getElementsByName("summary")[0]; var data = yadg.prepareRawResponse(rawData); summary_input.value = 'YADG Update'; }; return f; case "pth_torrent_overview": f = function(rawData) { var artist_inputs = document.getElementsByName("aliasname[]"), data = yadg.prepareRawResponse(rawData); if (data.artists != false) { var input_idx = 0; yadg_util.addRemoveArtistBoxes(data.effective_artist_count - artist_inputs.length); artist_inputs = document.getElementsByName("aliasname[]"); for (var i = 0; i < data.artist_keys.length; i++) { var artist_key = data.artist_keys[i], artist_types = data.artists[artist_key]; for (var j = 0; j < artist_types.length; j++) { var artist_type = artist_types[j], artist_input = artist_inputs[input_idx], type_select = artist_input.nextSibling; while (type_select.tagName != 'SELECT') { type_select = type_select.nextSibling; } artist_input.value = artist_key; var option_offsets 
= yadg_util.getOptionOffsets(type_select); if (artist_type === "main") { type_select.selectedIndex = option_offsets[1]; } else if (artist_type === "guest") { type_select.selectedIndex = option_offsets[2]; } else if (artist_type === "remixer") { type_select.selectedIndex = option_offsets[3]; } else { // we don't know this artist type, default to "main" type_select.selectedIndex = option_offsets[1]; } // next artist input input_idx += 1; } } } else { for (var i = 0; i < artist_inputs.length; i++) { artist_inputs[i].value = ''; } } }; return f; case "pth_request": case "pth_request_edit": var f = function(rawData) { var artist_inputs = document.getElementsByName("artists[]"), album_title_input = document.getElementsByName("title")[0], year_input = document.getElementsByName("year")[0], label_input = document.getElementsByName("recordlabel")[0], catalog_input = document.getElementsByName("cataloguenumber")[0], tags_input = document.getElementById("tags"), data = yadg.prepareRawResponse(rawData); if (data.artists != false) { var input_idx = 0; yadg_util.addRemoveArtistBoxes(data.effective_artist_count - artist_inputs.length); artist_inputs = document.getElementsByName("artists[]"); for (var i = 0; i < data.artist_keys.length; i++) { var artist_key = data.artist_keys[i], artist_types = data.artists[artist_key]; for (var j = 0; j < artist_types.length; j++) { var artist_type = artist_types[j], artist_input = artist_inputs[input_idx], type_select = artist_input.nextSibling; while (type_select.tagName != 'SELECT') { type_select = type_select.nextSibling; } artist_input.value = artist_key; var option_offsets = yadg_util.getOptionOffsets(type_select); if (artist_type === "main") { type_select.selectedIndex = option_offsets[1]; } else if (artist_type === "guest") { type_select.selectedIndex = option_offsets[2]; } else if (artist_type === "remixer") { type_select.selectedIndex = option_offsets[3]; } else { // we don't know this artist type, default to "main" type_select.selectedIndex = option_offsets[1]; } // next artist input input_idx += 1; } } } else { for (var i = 0; i < artist_inputs.length; i++) { artist_inputs[i].value = ''; } } if (data.tags != false) { tags_input.value = data.tag_string.toLowerCase(); } else { tags_input.value = ''; } yadg_util.setValueIfSet(data.year,year_input,data.year != false); yadg_util.setValueIfSet(data.title,album_title_input,data.title != false); yadg_util.setValueIfSet(data.label,label_input,data.label != false); yadg_util.setValueIfSet(data.catalog,catalog_input,data.catalog != false); }; return f; case "waffles_upload": var f = function(rawData) { var artist_input = document.getElementsByName("artist")[0], album_title_input = document.getElementsByName("album")[0], year_input = document.getElementsByName("year")[0], va_checkbox = document.getElementById("va"), tags_input = document.getElementById("tags"), data = yadg.prepareRawResponse(rawData); if (data.artists != false) { if (data.is_various) { artist_input.value = ""; va_checkbox.checked = true; } else { artist_input.value = data.flat_artist_string; va_checkbox.checked = false; } } else { va_checkbox.checked = false; artist_input.value = ""; } yadg_util.setValueIfSet(data.year,year_input,data.year != false); yadg_util.setValueIfSet(data.title,album_title_input,data.title != false); if (data.tags != false) { tags_input.value = data.tag_string_nodots.toLowerCase(); } else { tags_input.value = ''; } yadg_util.exec(function() {formatName()}); }; return f; case "waffles_upload_new": var f = function(rawData) { var 
artist_input = document.getElementById("id_artist"), album_title_input = document.getElementById("id_album"), year_input = document.getElementById("id_year"), va_checkbox = document.getElementById("id_va"), tags_input = document.getElementById("id_tags"), data = yadg.prepareRawResponse(rawData); if (data.artists != false) { if (data.is_various) { if (!va_checkbox.checked) { va_checkbox.click(); } } else { if (va_checkbox.checked) { va_checkbox.click(); } artist_input.value = data.flat_artist_string; } } else { if (va_checkbox.checked) { va_checkbox.click(); } artist_input.value = ""; } yadg_util.setValueIfSet(data.year,year_input,data.year != false); yadg_util.setValueIfSet(data.title,album_title_input,data.title != false); if (data.tags != false) { tags_input.value = data.tag_string_nodots.toLowerCase(); } else { tags_input.value = ''; } }; return f; case "waffles_request": var f = function(rawData) { var artist_input = document.getElementsByName("artist")[0], album_title_input = document.getElementsByName("title")[0], year_input = document.getElementsByName("year")[0], data = yadg.prepareRawResponse(rawData); if (data.artists != false) { if (data.is_various) { artist_input.value = "Various Artists"; } else { artist_input.value = data.flat_artist_string; } } else { artist_input.value = ""; } yadg_util.setValueIfSet(data.year,year_input,data.year != false); yadg_util.setValueIfSet(data.title,album_title_input,data.title != false); }; return f; default: // that should actually never happen return function(data) {}; } } }; var yadg_templates = { _templates : {}, _template_urls : {}, getTemplate : function(id, callback) { if (id in this._templates) { callback(this._templates[id]); } else if (id in this._template_urls) { var request = new requester(this._template_urls[id], 'GET', function(template) { yadg_templates.addTemplate(template); callback(template); }, null, yadg_templates.errorTemplate); request.send(); } else { this.errorTemplate(); } }, addTemplate : function(template) { this._templates[template.id] = template; }, addTemplateUrl : function(id, url) { this._template_urls[id] = url; }, errorTemplate : function() { yadg.printError("Could not get template. Please choose another one.", true); } }; var yadg_renderer = { _last_data : null, _last_template_id : null, render : function(template_id, data, callback, error_callback) { this._last_data = data; var new_template = this._last_template_id !== template_id; this._last_template_id = template_id; yadg_templates.getTemplate(template_id, function(template) { // the new template might have different dependencies, so initialize Swig with those if (new_template) { yadg_sandbox.resetSandbox(); yadg_sandbox.initializeSwig(template.dependencies); } yadg_sandbox.renderTemplate(template.code, data, callback, error_callback); }); }, renderCached : function(template_id, callback, error_callback) { if (this.hasCached()) { this.render(template_id, this._last_data, callback, error_callback); } }, hasCached : function() { return this._last_data !== null; }, clearCached : function() { this._last_data = null; } }; var yadg = { yadgHost : "https://yadg.cc", baseURI : "/api/v2/", standardError : "Sorry, an error occured. Please try again. If this error persists the user script might need updating.", authenticationError : "Your API token is invalid. 
Please provide a valid API token or remove the current one.", lastStateError : false, isBusy : false, init : function() { this.scraperSelect = document.getElementById('yadg_scraper'); this.formatSelect = document.getElementById('yadg_format'); this.input = document.getElementById('yadg_input'); this.responseDiv = document.getElementById('yadg_response'); this.button = document.getElementById('yadg_submit'); }, getBaseURL : function() { return this.yadgHost + this.baseURI; }, getScraperList : function(callback) { var url = this.getBaseURL() + "scrapers/"; var request = new requester(url, 'GET', callback); request.send(); }, getFormatsList : function(callback) { var url = this.getBaseURL() + "templates/"; this.getTemplates(url, [], callback); }, getTemplates : function(url, templates, callback) { var request = new requester(url, 'GET', function(data) { for (var i = 0; i < data.results.length; i++) { templates.push(data.results[i]); } if (data.next !== null) { yadg.getTemplates(data.next, templates, callback); } else { callback(templates); } }); request.send(); }, makeRequest : function(params) { if (this.isBusy) return; var data; if (params) { data = params; } else { data = { scraper: this.scraperSelect.options[this.scraperSelect.selectedIndex].value, input: this.input.value }; } var url = this.getBaseURL() + 'query/'; if (data.input !== '') { var request = new requester(url, 'POST', function(result) { yadg.getResult(result.url); }, data); this.busyStart(); request.send(); } }, getResult : function(result_url) { var request = new requester(result_url, 'GET', function(response) { if (response.status == "done") { if (response.data.type == 'ReleaseResult') { var template_id = yadg.formatSelect.options[yadg.formatSelect.selectedIndex].value; yadg_renderer.render(template_id, response, factory.setDescriptionBoxValue, factory.setDescriptionBoxValue); if (yadg.lastStateError == true) { yadg.responseDiv.innerHTML = ""; yadg.lastStateError = false; } var fillFunc = factory.getFormFillFunction(); fillFunc(response.data); } else if (response.data.type == 'ListResult') { var ul = document.createElement('ul'); ul.id = "yadg_release_list"; var release_list = response.data.items; for (var i = 0; i < release_list.length;i++) { var name = release_list[i]['name'], info = release_list[i]['info'], query_params = release_list[i]['queryParams'], release_url = release_list[i]['url']; var li = document.createElement('li'), a = document.createElement('a'); a.textContent = name; a.params = query_params; a.href = release_url; a.addEventListener('click',function(e) { e.preventDefault(); yadg.makeRequest(this.params);},false); li.appendChild(a); li.appendChild(document.createElement('br')); li.appendChild(document.createTextNode(info)); ul.appendChild(li); } if (ul.childNodes.length != 0) { yadg.responseDiv.innerHTML = ""; yadg.responseDiv.appendChild(ul); yadg.lastStateError = false; // we got a ListResult so clear the last ReleaseResult from the render cache yadg_renderer.clearCached(); } else { yadg.printError('Sorry, there were no matches.'); } } else if (response.data.type == 'NotFoundResult') { yadg.printError('I could not find the release with the given ID. You may want to try again with another one.'); } else { yadg.printError('Something weird happened. 
Please try again'); } yadg.busyStop(); } else if (response.status == 'failed') { yadg.failed_callback(); } else { var delay = function() { yadg.getResult(response.url); }; window.setTimeout(delay, 1000); } }); request.send(); }, printError : function(message, template_error) { this.responseDiv.innerHTML = ""; this.responseDiv.appendChild(document.createTextNode(message)); if (!template_error) { this.lastStateError = true; // there was a non template related error, so for consistencies sake clear the last ReleaseResult from the // render cache yadg_renderer.clearCached(); } }, failed_callback : function() { yadg.printError(yadg.standardError); yadg.busyStop(); }, failed_authentication_callback : function() { yadg.printError(yadg.authenticationError); yadg.busyStop(); }, busyStart : function() { this.isBusy = true; this.button.setAttribute('disabled',true); this.button.value = "Please wait..."; this.input.setAttribute('disabled',true); this.scraperSelect.setAttribute('disabled',true); this.formatSelect.setAttribute('disabled',true); }, busyStop : function() { this.button.removeAttribute('disabled'); this.button.value = "Fetch"; this.input.removeAttribute('disabled'); this.scraperSelect.removeAttribute('disabled'); this.formatSelect.removeAttribute('disabled'); this.isBusy = false; }, prepareRawResponse : function(rawData) { var result = {}; result.artists = false; result.year = false; result.title = false; result.label = false; result.catalog = false; result.genre = false; result.style = false; result.tags = false; result.is_various = false; result.flat_artist_string = false; if (rawData.artists.length > 0) { result.artists = {}; for (var i = 0; i < rawData.artists.length; i++) { var artist = rawData.artists[i]; if (!artist["isVarious"]) { result.artists[artist["name"]] = artist["types"]; } else { result.is_various = true; } } } if (rawData.discs.length > 0) { for (var k = 0; k < rawData.discs.length; k++) { var disc = rawData.discs[k]; for (var l = 0; l < disc["tracks"].length; l++) { var track = disc["tracks"][l]; for (var m = 0; m < track["artists"].length; m++) { var name = track["artists"][m]["name"], type = track["artists"][m]["types"]; var newTypes = null; if (name in result.artists) { newTypes = result.artists[name].concat(type); // deduplicate new types array for(var i = 0; i < newTypes.length; ++i) { for(var j = i+1; j < newTypes.length; ++j) { if(newTypes[i] === newTypes[j]) newTypes.splice(j--, 1); } } } else { newTypes = type; } result.artists[name] = newTypes; } } } } for (var i = 0; i < rawData['releaseEvents'].length; i++) { var event = rawData['releaseEvents'][i]; if (event.date) { result.year = event.date.match(/\d{4}/)[0]; if (result.year.length != 4) { result.year = false; } else { break; } } } if (rawData.title) { result.title = rawData.title; } if (rawData.labelIds.length > 0) { var labelId = rawData['labelIds'][0]; if (labelId.label) { result.label = labelId.label; } if (labelId.catalogueNrs.length > 0) { result.catalog = labelId.catalogueNrs[0]; } } if (rawData.genres.length > 0) { result.genre = rawData.genres; } if (rawData.styles.length > 0) { result.style = rawData.styles; } if (result.genre != false && result.style != false) { result.tags = rawData.genres.concat(rawData.styles); } else if (result.genre != false) { result.tags = rawData.genres; } else if (result.style != false) { result.tags = rawData.styles; } if (result.tags != false) { result.tag_string = ""; result.tag_string_nodots = ""; for (var i = 0; i < result.tags.length; i++) { result.tag_string = 
result.tag_string + result.tags[i].replace(/\s+/g,'.'); result.tag_string_nodots = result.tag_string_nodots + result.tags[i].replace(/\s+/g,' '); if (i != result.tags.length-1) { result.tag_string = result.tag_string + ', '; result.tag_string_nodots = result.tag_string_nodots + ', '; } } } if (result.artists != false) { // count the artists result.artists_length = 0; result.artist_keys = []; result.effective_artist_count = 0; for (var i in result.artists) { if (result.artists.hasOwnProperty(i)) { result.artists_length++; result.artist_keys.push(i); result.effective_artist_count += result.artists[i].length; } } } if (result.artists_length == 0) { result.artists = false; } else { // create a flat string of all the main artists var artist_string = ""; for (var i = 0; i < result.artists_length; i++) { if (result.artists[result.artist_keys[i]].indexOf("main") != -1) { if (artist_string != "" && i < result.artists_length - 2) { artist_string = artist_string + ", "; } else if (artist_string != "" && i < result.artists_length - 1) { artist_string = artist_string + " & "; } artist_string = artist_string + result.artist_keys[i]; } } result.flat_artist_string = artist_string; } return result; } }; yadg_sandbox.init(function() { if (factory.init()) { // returns true if we run on a valid location yadg.init(); } });
pth_yadg.user.js
// ==UserScript== // @id pth-yadg // @name passtheheadphones.me - YADG // @description This script provides integration with online description generator YADG (http://yadg.cc) - Credit to Slack06 // @license https://github.com/SavageCore/yadg-pth-userscript/blob/master/LICENSE // @version 1.3.10 // @namespace yadg // @grant GM_xmlhttpRequest // @require https://yadg.cc/static/js/jsandbox.min.js // @include http*://*passtheheadphones.me/upload.php* // @include http*://*passtheheadphones.me/requests.php* // @include http*://*passtheheadphones.me/torrents.php* // @include http*://*waffles.ch/upload.php* // @include http*://*waffles.ch/requests.php* // @downloadURL https://github.com/SavageCore/yadg-pth-userscript/raw/master/pth_yadg.user.js // ==/UserScript== // --------- USER SETTINGS START --------- /* Here you can set site specific default templates. You can find a list of available templates at: https://yadg.cc/api/v2/templates/ */ var defaultPTHFormat = 5, defaultWafflesFormat = 9; // --------- USER SETTINGS END --------- // --------- THIRD PARTY CODE AREA START --------- // // Creates an object which gives some helper methods to // Save/Load/Remove data to/from the localStorage // // Source from: https://github.com/gergob/localstoragewrapper // function LocalStorageWrapper (applicationPrefix) { "use strict"; if(applicationPrefix == undefined) { throw new Error('applicationPrefix parameter should be defined'); } var delimiter = '_'; //if the passed in value for prefix is not string, it should be converted var keyPrefix = typeof(applicationPrefix) === 'string' ? applicationPrefix : JSON.stringify(applicationPrefix); var localStorage = window.localStorage||unsafeWindow.localStorage; var isLocalStorageAvailable = function() { return typeof(localStorage) != undefined }; var getKeyPrefix = function() { return keyPrefix; }; // // validates if there is a prefix defined for the keys // and checks if the localStorage functionality is available or not // var makeChecks = function(key) { var prefix = getKeyPrefix(); if(prefix == undefined) { throw new Error('No prefix was defined, data cannot be saved'); } if(!isLocalStorageAvailable()) { throw new Error('LocalStorage is not supported by your browser, data cannot be saved'); } //keys are always strings var checkedKey = typeof(key) === 'string' ? 
key : JSON.stringify(key); return checkedKey; }; // // saves the value associated to the key into the localStorage // var addItem = function(key, value) { var that = this; try{ var checkedKey = makeChecks(key); var combinedKey = that.getKeyPrefix() + delimiter + checkedKey; localStorage.setItem(combinedKey, JSON.stringify(value)); } catch(error) { console.log(error); throw error; } }; // // gets the value of the object saved to the key passed as parameter // var getItem = function(key) { var that = this; var result = undefined; try{ var checkedKey = makeChecks(key); var combinedKey = that.getKeyPrefix() + delimiter + checkedKey; var resultAsJSON = localStorage.getItem(combinedKey); result = JSON.parse(resultAsJSON); } catch(error) { console.log(error); throw error; } return result; }; // // returns all the keys from the localStorage // var getAllKeys = function() { var prefix = getKeyPrefix(); var results = []; if(prefix == undefined) { throw new Error('No prefix was defined, data cannot be saved'); } if(!isLocalStorageAvailable()) { throw new Error('LocalStorage is not supported by your browser, data cannot be saved'); } for(var key in localStorage) { if(key.indexOf(prefix) == 0) { var keyParts = key.split(delimiter); results.push(keyParts[1]); } } return results; }; // // removes the value associated to the key from the localStorage // var removeItem = function(key) { var that = this; var result = false; try{ var checkedKey = makeChecks(key); var combinedKey = that.getKeyPrefix() + delimiter + checkedKey; localStorage.removeItem(combinedKey); result = true; } catch(error) { console.log(error); throw error; } return result; }; // // removes all the values from the localStorage // var removeAll = function() { var that = this; try{ var allKeys = that.getAllKeys(); for(var i=0; i < allKeys.length; ++i) { var checkedKey = makeChecks(allKeys[i]); var combinedKey = that.getKeyPrefix() + delimiter + checkedKey; localStorage.removeItem(combinedKey); } } catch(error) { console.log(error); throw error; } }; // make some of the functionalities public return { isLocalStorageAvailable : isLocalStorageAvailable, getKeyPrefix : getKeyPrefix, addItem : addItem, getItem : getItem, getAllKeys : getAllKeys, removeItem : removeItem, removeAll : removeAll } } // --------- THIRD PARTY CODE AREA END --------- var yadg_util = { exec : function exec(fn) { var script = document.createElement('script'); script.setAttribute("type", "application/javascript"); script.textContent = '(' + fn + ')();'; document.body.appendChild(script); // run the script document.body.removeChild(script); // clean up }, // handle for updating page css, taken from one of hateradio's scripts addCSS : function(style) { if(!this.style) { this.style = document.createElement('style'); this.style.type = 'text/css'; (document.head || document.getElementsByTagName('head')[0]).appendChild(this.style); } this.style.appendChild(document.createTextNode(style+'\n')); }, setValueIfSet: function(value,input,cond) { if (cond) { input.value = value; } else { input.value = ''; } }, // negative count will remove, positive count will add given number of artist boxes addRemoveArtistBoxes : function(count) { if (count != 0) { if (count < 0) { for (var i = 0; i < -count; i++) { yadg_util.exec(function() {RemoveArtistField()}); } } else { for (var i = 0; i < count; i++) { yadg_util.exec(function() {AddArtistField()}); } } } }, getOptionOffsets : function(select) { var option_offsets = {}; for (var j = 0; j < select.options.length; j++) { 
option_offsets[select.options[j].value] = select.options[j].index; } return option_offsets; }, storage : new LocalStorageWrapper("yadg"), settings : new LocalStorageWrapper("yadgSettings") }; // very simple wrapper for XmlHttpRequest function requester(url, method, callback, data, error_callback) { this.data = data; this.url = url; this.method = method; if (!error_callback) { error_callback = yadg.failed_callback; } this.send = function() { var details = { url : this.url, method : this.method, onload : function(response) { if (response.status === 200) { callback(JSON.parse(response.responseText)); } else if (response.status === 401) { yadg.failed_authentication_callback(); } else { error_callback(); } }, onerror : error_callback }; if (method == "POST") { details.data = JSON.stringify(this.data); } var headers = { "Accept" : "application/json", "Content-Type" : "application/json" }; if (yadg_util.settings.getItem(factory.KEY_API_TOKEN)) { headers.Authorization = "Token " + yadg_util.settings.getItem(factory.KEY_API_TOKEN); } details.headers = headers; GM_xmlhttpRequest(details); }; } var yadg_sandbox = { KEY_LAST_WARNING : "templateLastWarning", init : function(callback) { GM_xmlhttpRequest({ method: 'GET', url: yadg.yadgHost + '/static/js/jsandbox-worker.js', onload: function(response) { var script, dataURL = null; if (response.status === 200) { script = response.responseText; var blob = new Blob([script], {type: 'application/javascript'}); var URL = window.URL || window.webkitURL; if (!URL || !URL.createObjectURL) { throw new Error('No no valid implementation of window.URL.createObjectURL found.'); } dataURL = URL.createObjectURL(blob); yadg_sandbox.initCallback(dataURL); yadg_sandbox.loadSwig(callback); } else { yadg_sandbox.initCallbackError(); } }, onerror: function() { yadg_sandbox.initCallbackError(); } }); }, loadSwig : function(callback) { // importScripts for the web worker will not work in Firefox with cross-domain requests // see: https://bugzilla.mozilla.org/show_bug.cgi?id=756589 // so download the Swig files manually with GM_xmlhttpRequest GM_xmlhttpRequest({ method: 'GET', url: yadg.yadgHost + "/static/js/swig.min.js", onload: function(response) { if (response.status === 200) { yadg_sandbox.swig_script = response.responseText; GM_xmlhttpRequest({ method: 'GET', url: yadg.yadgHost + "/static/js/swig.custom.js", onload: function(response) { if (response.status === 200) { yadg_sandbox.swig_custom_script = response.responseText; callback(); } } }); } } }); }, initializeSwig : function(dependencies) { if (!(this.swig_script && this.swig_custom_script)) { yadg.failed_callback(); return } yadg_sandbox.exec({data: this.swig_script, onerror: yadg.failed_callback}); yadg_sandbox.exec({data: this.swig_custom_script, onerror: yadg.failed_callback}); yadg_sandbox.exec({data: "var myswig = new swig.Swig({ loader: swig.loaders.memory(input.templates), autoescape: false }), i=0; yadg_filters.register_filters(myswig);", input: {templates: dependencies}}); }, renderTemplate : function(template, data, callback, error) { var eval_string = "myswig.render(input.template, { locals: input.data, filename: 'scratchpad' + (i++) })"; this.eval({data: eval_string, callback: function(out) {callback(out);}, input: {template: template, data: data}, onerror: function(err){error(err);}}); }, initCallback : function(dataUrl) { JSandbox.url = dataUrl; this.jsandbox = new JSandbox(); this.initError = false; }, resetSandbox : function() { this.jsandbox.terminate(); this.jsandbox = new JSandbox(); }, load : 
function(options) { this.jsandbox.load(options); }, exec : function(options) { this.jsandbox.exec(options); }, eval : function(options) { this.jsandbox.eval(options); }, initCallbackError : function() { this.initError = true; var last_warning = yadg_util.storage.getItem(this.KEY_LAST_WARNING), now = new Date(); if (last_warning === null || now.getTime() - (new Date(last_warning)).getTime() > factory.CACHE_TIMEOUT) { alert("Could not load the necessary script files for executing YADG. If this error persists you might need to update the user script. You will only get this message once a day."); yadg_util.storage.addItem(this.KEY_LAST_WARNING, now); } } }; var factory = { // storage keys for cache KEY_LAST_CHECKED : "lastChecked", KEY_SCRAPER_LIST : "scraperList", KEY_FORMAT_LIST : "formatList", // storage keys for settings KEY_API_TOKEN : "apiToken", KEY_DEFAULT_TEMPLATE : "defaultTemplate", KEY_DEFAULT_SCRAPER : "defaultScraper", KEY_REPLACE_DESCRIPTION : "replaceDescriptionOn", KEY_SETTINGS_INIT_VER : "settingsInitializedVer", CACHE_TIMEOUT : 1000*60*60*24, // 24 hours UPDATE_PROGRESS : 0, locations : new Array( { name : 'pth_upload', regex : /http(s)?\:\/\/(.*\.)?passtheheadphones\.me\/upload\.php.*/i }, { name : 'pth_edit', regex : /http(s)?\:\/\/(.*\.)?passtheheadphones\.me\/torrents\.php\?action=editgroup&groupid=.*/i }, { name : 'pth_request', regex : /http(s)?\:\/\/(.*\.)?passtheheadphones\.me\/requests\.php\?action=new/i }, { name : 'pth_request_edit', regex : /http(s)?\:\/\/(.*\.)?passtheheadphones\.me\/requests\.php\?action=edit&id=.*/i }, { name : 'pth_torrent_overview', regex : /http(s)?\:\/\/(.*\.)?passtheheadphones\.me\/torrents\.php\?id=.*/i }, { name : 'waffles_upload', // regex : /http(s)?\:\/\/(.*\.)?waffles\.ch\/upload\.php\?legacy=1.*/i // }, // { // TODO: reenable support for the new Waffles upload page once it is reactivated // name : 'waffles_upload_new', regex : /http(s)?\:\/\/(.*\.)?waffles\.ch\/upload\.php.*/i }, { name : 'waffles_request', regex : /http(s)?\:\/\/(.*\.)?waffles\.ch\/requests\.php\?do=add/i } ), determineLocation : function(uri) { for (var i = 0; i < this.locations.length; i++) { if (this.locations[i].regex.test(uri)) { return this.locations[i].name; } } return null; }, init : function() { this.currentLocation = this.determineLocation(document.URL); // only continue with the initialization if we found a valid location if (this.currentLocation !== null) { this.insertIntoPage(this.getInputElements()); // set the necessary styles this.setStyles(); // make sure we initialize the settings to the most recent version this.initializeSettings(); // populate settings inputs this.populateSettings(); // add the appropriate action for the button var button = document.getElementById('yadg_submit'); button.addEventListener('click', function (e) { e.preventDefault(); yadg.makeRequest(); }, false); // add the action for the options toggle var toggleLink = document.getElementById('yadg_toggle_options'); if (toggleLink !== null) { toggleLink.addEventListener('click', function (e) { e.preventDefault(); var optionsDiv = document.getElementById('yadg_options'), display = optionsDiv.style.display; if (display == 'none' || display == '') { optionsDiv.style.display = 'block'; } else { optionsDiv.style.display = 'none'; } }); } // add the action for the template select var formatSelect = this.getFormatSelect(); if (formatSelect !== null) { formatSelect.addEventListener('change', function (e) { if (yadg_renderer.hasCached()) { yadg_renderer.renderCached(this.value, 
factory.setDescriptionBoxValue, factory.setDescriptionBoxValue); } }); } // add the action to the save settings link var saveSettingsLink = document.getElementById('yadg_save_settings'); if (saveSettingsLink !== null) { saveSettingsLink.addEventListener('click', function (e) { e.preventDefault(); factory.saveSettings(); alert("Settings saved successfully."); }); } // add the action to the clear cache link var clearCacheLink = document.getElementById('yadg_clear_cache'); if (clearCacheLink !== null) { clearCacheLink.addEventListener('click', function (e) { e.preventDefault(); yadg_util.storage.removeAll(); alert("Cache cleared. Please reload the page for this to take effect."); }); } var last_checked = yadg_util.storage.getItem(factory.KEY_LAST_CHECKED); if (last_checked === null || (new Date()).getTime() - (new Date(last_checked)).getTime() > factory.CACHE_TIMEOUT) { // update the scraper and formats list factory.UPDATE_PROGRESS = 1; yadg.getScraperList(factory.setScraperSelect); yadg.getFormatsList(factory.setFormatSelect); } else { factory.setScraperSelect(yadg_util.storage.getItem(factory.KEY_SCRAPER_LIST)); factory.setFormatSelect(yadg_util.storage.getItem(factory.KEY_FORMAT_LIST)); } return true; } else { return false; } }, getApiTokenInput : function() { return document.getElementById('yadg_api_token'); }, getReplaceDescriptionCheckbox : function() { return document.getElementById('yadg_options_replace'); }, getReplaceDescriptionSettingKey : function() { return this.makeReplaceDescriptionSettingsKey(this.currentLocation); }, makeReplaceDescriptionSettingsKey : function(subKey) { return this.KEY_REPLACE_DESCRIPTION + subKey.replace(/_/g, ""); }, initializeSettings : function() { var settings_ver = yadg_util.settings.getItem(factory.KEY_SETTINGS_INIT_VER), current_ver = 1; if (!settings_ver) { settings_ver = 0; } if (settings_ver < current_ver) { // replace descriptions on upload and new request pages var locations = [ 'pth_upload', 'pth_request', 'waffles_upload', 'waffles_upload_new', 'waffles_request' ]; for (var i = 0; i < locations.length; i++) { var loc = locations[i], replace_desc_setting_key = factory.makeReplaceDescriptionSettingsKey(loc); yadg_util.settings.addItem(replace_desc_setting_key, true); } } yadg_util.settings.addItem(factory.KEY_SETTINGS_INIT_VER, current_ver); }, populateSettings : function() { var api_token = yadg_util.settings.getItem(factory.KEY_API_TOKEN), replace_desc = yadg_util.settings.getItem(factory.getReplaceDescriptionSettingKey()); if (api_token) { var api_token_input = factory.getApiTokenInput(); api_token_input.value = api_token; } if (replace_desc) { var replace_desc_checkbox = factory.getReplaceDescriptionCheckbox(); replace_desc_checkbox.checked = true; } }, saveSettings : function() { var scraper_select = factory.getScraperSelect(), template_select = factory.getFormatSelect(), api_token_input = factory.getApiTokenInput(), replace_desc_checkbox = factory.getReplaceDescriptionCheckbox(); var current_scraper = null, current_template = null, api_token = api_token_input.value.trim(), replace_description = replace_desc_checkbox.checked; if (scraper_select.options.length > 0) { current_scraper = scraper_select.options[scraper_select.selectedIndex].value; } if (template_select.options.length > 0) { current_template = template_select.options[template_select.selectedIndex].value; } if (current_scraper !== null) { yadg_util.settings.addItem(factory.KEY_DEFAULT_SCRAPER, current_scraper); } if (current_template !== null) { 
yadg_util.settings.addItem(factory.KEY_DEFAULT_TEMPLATE, current_template); } if (api_token !== "") { yadg_util.settings.addItem(factory.KEY_API_TOKEN, api_token); } else { yadg_util.settings.removeItem(factory.KEY_API_TOKEN); } var replace_desc_setting_key = factory.getReplaceDescriptionSettingKey(); if (replace_description) { yadg_util.settings.addItem(replace_desc_setting_key, true); } else { yadg_util.settings.removeItem(replace_desc_setting_key); } }, setDescriptionBoxValue : function(value) { var desc_box = factory.getDescriptionBox(), replace_desc_checkbox = factory.getReplaceDescriptionCheckbox(), replace_desc = false; if (replace_desc_checkbox !== null) { replace_desc = replace_desc_checkbox.checked; } if (desc_box !== null) { if (!replace_desc && /\S/.test(desc_box.value)) { // check if the current description contains more than whitespace desc_box.value += "\n\n" + value; } else { desc_box.value = value; } } }, getFormatSelect : function() { return document.getElementById('yadg_format'); }, setDefaultFormat : function() { var format_select = factory.getFormatSelect(); var format_offsets = yadg_util.getOptionOffsets(format_select); var default_format = yadg_util.settings.getItem(factory.KEY_DEFAULT_TEMPLATE); if (default_format !== null && default_format in format_offsets) { format_select.selectedIndex = format_offsets[default_format]; } else { // we have no settings so fall back to the hard coded defaults switch (this.currentLocation) { case "waffles_upload": case "waffles_upload_new": case "waffles_request": format_select.selectedIndex = format_offsets[defaultWafflesFormat]; break; default: format_select.selectedIndex = format_offsets[defaultPTHFormat]; break; } } }, getScraperSelect : function() { return document.getElementById("yadg_scraper"); }, setDefaultScraper : function() { var default_scraper = yadg_util.settings.getItem(factory.KEY_DEFAULT_SCRAPER); if (default_scraper !== null) { var scraper_select = factory.getScraperSelect(); var scraper_offsets = yadg_util.getOptionOffsets(scraper_select); if (default_scraper in scraper_offsets) { scraper_select.selectedIndex = scraper_offsets[default_scraper]; } } }, setScraperSelect : function(scrapers) { var scraper_select = factory.getScraperSelect(); factory.setSelect(scraper_select, scrapers); factory.setDefaultScraper(); if (factory.UPDATE_PROGRESS > 0) { yadg_util.storage.addItem(factory.KEY_SCRAPER_LIST, scrapers); factory.UPDATE_PROGRESS |= 1<<1; if (factory.UPDATE_PROGRESS === 7) { yadg_util.storage.addItem(factory.KEY_LAST_CHECKED, new Date()); } } }, setFormatSelect : function(templates) { var format_select = factory.getFormatSelect(); var non_utility = []; var save_templates = []; for (var i = 0; i < templates.length; i++) { if (factory.UPDATE_PROGRESS > 0) { yadg_templates.addTemplate(templates[i]); save_templates.push({ id : templates[i]['id'], url : templates[i]['url'], name : templates[i]['name'], nameFormatted : templates[i]['nameFormatted'], owner : templates[i]['owner'], default : templates[i]['default'], isUtility : templates[i]['isUtility'] }); } else { yadg_templates.addTemplateUrl(templates[i]['id'], templates[i]['url']); } if (!templates[i]['isUtility']) { non_utility.push(templates[i]); } } factory.setSelect(format_select, non_utility); factory.setDefaultFormat(); if (factory.UPDATE_PROGRESS > 0) { yadg_util.storage.addItem(factory.KEY_FORMAT_LIST, save_templates); factory.UPDATE_PROGRESS |= 1<<2; if (factory.UPDATE_PROGRESS === 7) { yadg_util.storage.addItem(factory.KEY_LAST_CHECKED, new Date()); } } }, 
setSelect : function(select, data) { select.options.length = data.length; for (var i = 0; i < data.length; i++) { // we are not using the javascript constructor to create an Option instance because this will create an // incompatibility with jQuery in Chrome which will make it impossible to add a new artist field on passtheheadphones.me var o = document.createElement("option"); if ('nameFormatted' in data[i]) { o.text = data[i]['nameFormatted']; } else { o.text = data[i]['name']; } o.value = data[i]['value'] || data[i]['id']; o.selected = data[i]['default']; select.options[i] = o; if (data[i]['default']) { select.selectedIndex = i; } if (data[i]['url']) { o.setAttribute('data-url', data[i]['url']); } } }, setStyles : function() { // general styles yadg_util.addCSS('div#yadg_options{ display:none; margin-top:3px; } input#yadg_input,input#yadg_submit,label#yadg_format_label,a#yadg_scraper_info { margin-right: 5px } div#yadg_response { margin-top:3px; } select#yadg_scraper { margin-right: 2px } #yadg_options_template,#yadg_options_api_token,#yadg_options_replace_div { margin-bottom: 3px; } .add_form[name="yadg"] input,.add_form[name="yadg"] select { width: 90%; margin: 2px 0 !important; } div.box { border:none !important }'); // location specific styles will go here switch(this.currentLocation) { case "waffles_upload": yadg_util.addCSS('div#yadg_response ul { margin-left: 0 !important; padding-left: 0 !important; }'); break; case "waffles_request": yadg_util.addCSS('div#yadg_response ul { margin-left: 0 !important; padding-left: 0 !important; }'); break; default: break; } }, getInputElements : function() { var buttonHTML = '<input type="submit" value="Fetch" id="yadg_submit"/>', scraperSelectHTML = '<select name="yadg_scraper" id="yadg_scraper"></select>', optionsHTML = '<div id="yadg_options"><div id="yadg_options_template"><label for="yadg_format" id="yadg_format_label">Template:</label><select name="yadg_format" id="yadg_format"></select></div><div id="yadg_options_api_token"><label for="yadg_api_token" id="yadg_api_token_label">API token (<a href="https://yadg.cc/api/token" target="_blank">Get one here</a>):</label> <input type="text" name="yadg_api_token" id="yadg_api_token" size="50" /></div><div id="yadg_options_replace_div"><input type="checkbox" name="yadg_options_replace" id="yadg_options_replace" /> <label for="yadg_options_replace" id="yadg_options_replace_label">Replace descriptions on this page</label></div><div id="yadg_options_links"><a id="yadg_save_settings" href="#" title="Save the currently selected scraper and template as default for this site and save the given API token.">Save settings</a> <span class="yadg_separator">|</span> <a id="yadg_clear_cache" href="#">Clear cache</a></div></div>', inputHTML = '<input type="text" name="yadg_input" id="yadg_input" size="60" />', responseDivHTML = '<div id="yadg_response"></div>', toggleOptionsLinkHTML = '<a id="yadg_toggle_options" href="#">Toggle options</a>', scraperInfoLink = '<a id="yadg_scraper_info" href="https://yadg.cc/available-scrapers" target="_blank" title="Get additional information on the available scrapers">[?]</a>'; switch (this.currentLocation) { case "pth_upload": var tr = document.createElement('tr'); tr.className = "yadg_tr"; tr.innerHTML = '<td class="label">YADG:</td><td>' + inputHTML + scraperSelectHTML + scraperInfoLink + buttonHTML + toggleOptionsLinkHTML + optionsHTML + responseDivHTML + '</td>'; return tr; case "pth_edit": var div = document.createElement('div'); div.className = "yadg_div"; 
div.innerHTML = '<h3 class="label">YADG:</h3>\n' + inputHTML + '\n' + scraperSelectHTML + '\n' + scraperInfoLink + '\n' + buttonHTML + '\n' + toggleOptionsLinkHTML + '\n' + optionsHTML + '\n' + responseDivHTML; return div; case "pth_torrent_overview": var div = document.createElement('div'); div.id = 'yadg_div' div.className = 'box'; div.innerHTML = '<div class="head"><strong>YADG</strong></div>\n<div class="body">\n<form class="add_form" name="yadg" method="post">\n<input type="text" name="yadg_input" id="yadg_input" />\n' + scraperSelectHTML + '\n' + scraperInfoLink + '\n' + buttonHTML + '\n' + toggleOptionsLinkHTML + '\n' + optionsHTML + '\n' + responseDivHTML; return div; case "pth_request": case "pth_request_edit": var tr = document.createElement('tr'); tr.className = "yadg_tr"; tr.innerHTML = '<td class="label">YADG:</td><td>' + inputHTML + scraperSelectHTML + scraperInfoLink + buttonHTML + toggleOptionsLinkHTML + optionsHTML + responseDivHTML + '</td>'; return tr; case "waffles_upload": var tr = document.createElement('tr'); tr.className = "yadg_tr"; tr.innerHTML = '<td class="heading" valign="top" align="right"><label for="yadg_input">YADG:</label></td><td>' + inputHTML + scraperSelectHTML + scraperInfoLink + buttonHTML + toggleOptionsLinkHTML + optionsHTML + responseDivHTML + '</td>'; return tr; case "waffles_upload_new": var p = document.createElement('p'); p.className = "yadg_p"; p.innerHTML = '<label for="yadg_input">YADG:</label>' + inputHTML + scraperSelectHTML + scraperInfoLink + buttonHTML + toggleOptionsLinkHTML + optionsHTML + responseDivHTML; return p; case "waffles_request": var tr = document.createElement('tr'); tr.className = "yadg_tr"; tr.innerHTML = '<td style="text-align:left;width:100px;">YADG:</td><td style="text-align:left;">' + inputHTML + scraperSelectHTML + scraperInfoLink + buttonHTML + toggleOptionsLinkHTML + optionsHTML + responseDivHTML + '</td>'; return tr; default: // that should actually never happen return document.createElement('div'); } }, insertIntoPage : function(element) { switch (this.currentLocation) { case "pth_upload": var year_tr = document.getElementById('year_tr'); year_tr.parentNode.insertBefore(element,year_tr); break; case "pth_edit": var summary_input = document.getElementsByName('summary')[0]; summary_input.parentNode.insertBefore(element,summary_input.nextSibling.nextSibling); break; case "pth_torrent_overview": var add_artists_box = document.getElementsByClassName("box_addartists")[0]; add_artists_box.appendChild(element); break; case "pth_request": case "pth_request_edit": var artist_tr = document.getElementById('artist_tr'); artist_tr.parentNode.insertBefore(element,artist_tr); break; case "waffles_upload": var submit_button = document.getElementsByName('submit')[0]; submit_button.parentNode.parentNode.parentNode.insertBefore(element,submit_button.parentNode.parentNode); break; case "waffles_upload_new": var h4s = document.getElementsByTagName('h4'); var div; for (var i=0; i < h4s.length; i++) { if (h4s[i].innerHTML.indexOf('read the rules') !== -1) { div = h4s[i].parentNode; break; } } div.appendChild(element); break; case "waffles_request": var category_select = document.getElementsByName('category')[0]; category_select.parentNode.parentNode.parentNode.insertBefore(element,category_select.parentNode.parentNode); break; default: break; } }, getDescriptionBox : function() { switch (this.currentLocation) { case "pth_upload": return document.getElementById('album_desc'); case "pth_edit": return document.getElementsByName('body')[0]; 
case "pth_torrent_overview": if (!this.hasOwnProperty("dummybox")) { this.dummybox = document.createElement('div'); } return this.dummybox; case "pth_request": case "pth_request_edit": return document.getElementsByName('description')[0]; case "waffles_upload": return document.getElementById('descr'); case "waffles_upload_new": return document.getElementById('id_descr'); case "waffles_request": return document.getElementsByName('information')[0]; default: // that should actually never happen return document.createElement('div'); } }, getFormFillFunction : function() { switch (this.currentLocation) { case "pth_upload": var f = function(rawData) { var artist_inputs = document.getElementsByName("artists[]"), album_title_input = document.getElementById("title"), year_input = document.getElementById("year"), label_input = document.getElementById("record_label"), catalog_input = document.getElementById("catalogue_number"), tags_input = document.getElementById("tags"), data = yadg.prepareRawResponse(rawData); if (data.artists != false) { var input_idx = 0; yadg_util.addRemoveArtistBoxes(data.effective_artist_count - artist_inputs.length); artist_inputs = document.getElementsByName("artists[]"); for (var i = 0; i < data.artist_keys.length; i++) { var artist_key = data.artist_keys[i], artist_types = data.artists[artist_key]; for (var j = 0; j < artist_types.length; j++) { var artist_type = artist_types[j], artist_input = artist_inputs[input_idx], type_select = artist_input.nextSibling; while (type_select.tagName != 'SELECT') { type_select = type_select.nextSibling; } artist_input.value = artist_key; var option_offsets = yadg_util.getOptionOffsets(type_select); if (artist_type === "main") { type_select.selectedIndex = option_offsets[1]; } else if (artist_type === "guest") { type_select.selectedIndex = option_offsets[2]; } else if (artist_type === "remixer") { type_select.selectedIndex = option_offsets[3]; } else { // we don't know this artist type, default to "main" type_select.selectedIndex = option_offsets[1]; } // next artist input input_idx += 1; } } } else { for (var i = 0; i < artist_inputs.length; i++) { artist_inputs[i].value = ''; } } if (data.tags != false) { tags_input.value = data.tag_string.toLowerCase(); } else { tags_input.value = ''; } yadg_util.setValueIfSet(data.year,year_input,data.year != false); yadg_util.setValueIfSet(data.title,album_title_input,data.title != false); yadg_util.setValueIfSet(data.label,label_input,data.label != false); yadg_util.setValueIfSet(data.catalog,catalog_input,data.catalog != false); }; return f; case "pth_edit": f = function(rawData) { var summary_input = document.getElementsByName("summary")[0]; var data = yadg.prepareRawResponse(rawData); summary_input.value = 'YADG Update'; }; return f; case "pth_torrent_overview": f = function(rawData) { var artist_inputs = document.getElementsByName("aliasname[]"), data = yadg.prepareRawResponse(rawData); if (data.artists != false) { var input_idx = 0; yadg_util.addRemoveArtistBoxes(data.effective_artist_count - artist_inputs.length); artist_inputs = document.getElementsByName("aliasname[]"); for (var i = 0; i < data.artist_keys.length; i++) { var artist_key = data.artist_keys[i], artist_types = data.artists[artist_key]; for (var j = 0; j < artist_types.length; j++) { var artist_type = artist_types[j], artist_input = artist_inputs[input_idx], type_select = artist_input.nextSibling; while (type_select.tagName != 'SELECT') { type_select = type_select.nextSibling; } artist_input.value = artist_key; var option_offsets 
= yadg_util.getOptionOffsets(type_select); if (artist_type === "main") { type_select.selectedIndex = option_offsets[1]; } else if (artist_type === "guest") { type_select.selectedIndex = option_offsets[2]; } else if (artist_type === "remixer") { type_select.selectedIndex = option_offsets[3]; } else { // we don't know this artist type, default to "main" type_select.selectedIndex = option_offsets[1]; } // next artist input input_idx += 1; } } } else { for (var i = 0; i < artist_inputs.length; i++) { artist_inputs[i].value = ''; } } }; return f; case "pth_request": case "pth_request_edit": var f = function(rawData) { var artist_inputs = document.getElementsByName("artists[]"), album_title_input = document.getElementsByName("title")[0], year_input = document.getElementsByName("year")[0], label_input = document.getElementsByName("recordlabel")[0], catalog_input = document.getElementsByName("cataloguenumber")[0], tags_input = document.getElementById("tags"), data = yadg.prepareRawResponse(rawData); if (data.artists != false) { var input_idx = 0; yadg_util.addRemoveArtistBoxes(data.effective_artist_count - artist_inputs.length); artist_inputs = document.getElementsByName("artists[]"); for (var i = 0; i < data.artist_keys.length; i++) { var artist_key = data.artist_keys[i], artist_types = data.artists[artist_key]; for (var j = 0; j < artist_types.length; j++) { var artist_type = artist_types[j], artist_input = artist_inputs[input_idx], type_select = artist_input.nextSibling; while (type_select.tagName != 'SELECT') { type_select = type_select.nextSibling; } artist_input.value = artist_key; var option_offsets = yadg_util.getOptionOffsets(type_select); if (artist_type === "main") { type_select.selectedIndex = option_offsets[1]; } else if (artist_type === "guest") { type_select.selectedIndex = option_offsets[2]; } else if (artist_type === "remixer") { type_select.selectedIndex = option_offsets[3]; } else { // we don't know this artist type, default to "main" type_select.selectedIndex = option_offsets[1]; } // next artist input input_idx += 1; } } } else { for (var i = 0; i < artist_inputs.length; i++) { artist_inputs[i].value = ''; } } if (data.tags != false) { tags_input.value = data.tag_string.toLowerCase(); } else { tags_input.value = ''; } yadg_util.setValueIfSet(data.year,year_input,data.year != false); yadg_util.setValueIfSet(data.title,album_title_input,data.title != false); yadg_util.setValueIfSet(data.label,label_input,data.label != false); yadg_util.setValueIfSet(data.catalog,catalog_input,data.catalog != false); }; return f; case "waffles_upload": var f = function(rawData) { var artist_input = document.getElementsByName("artist")[0], album_title_input = document.getElementsByName("album")[0], year_input = document.getElementsByName("year")[0], va_checkbox = document.getElementById("va"), tags_input = document.getElementById("tags"), data = yadg.prepareRawResponse(rawData); if (data.artists != false) { if (data.is_various) { artist_input.value = ""; va_checkbox.checked = true; } else { artist_input.value = data.flat_artist_string; va_checkbox.checked = false; } } else { va_checkbox.checked = false; artist_input.value = ""; } yadg_util.setValueIfSet(data.year,year_input,data.year != false); yadg_util.setValueIfSet(data.title,album_title_input,data.title != false); if (data.tags != false) { tags_input.value = data.tag_string_nodots.toLowerCase(); } else { tags_input.value = ''; } yadg_util.exec(function() {formatName()}); }; return f; case "waffles_upload_new": var f = function(rawData) { var 
artist_input = document.getElementById("id_artist"), album_title_input = document.getElementById("id_album"), year_input = document.getElementById("id_year"), va_checkbox = document.getElementById("id_va"), tags_input = document.getElementById("id_tags"), data = yadg.prepareRawResponse(rawData); if (data.artists != false) { if (data.is_various) { if (!va_checkbox.checked) { va_checkbox.click(); } } else { if (va_checkbox.checked) { va_checkbox.click(); } artist_input.value = data.flat_artist_string; } } else { if (va_checkbox.checked) { va_checkbox.click(); } artist_input.value = ""; } yadg_util.setValueIfSet(data.year,year_input,data.year != false); yadg_util.setValueIfSet(data.title,album_title_input,data.title != false); if (data.tags != false) { tags_input.value = data.tag_string_nodots.toLowerCase(); } else { tags_input.value = ''; } }; return f; case "waffles_request": var f = function(rawData) { var artist_input = document.getElementsByName("artist")[0], album_title_input = document.getElementsByName("title")[0], year_input = document.getElementsByName("year")[0], data = yadg.prepareRawResponse(rawData); if (data.artists != false) { if (data.is_various) { artist_input.value = "Various Artists"; } else { artist_input.value = data.flat_artist_string; } } else { artist_input.value = ""; } yadg_util.setValueIfSet(data.year,year_input,data.year != false); yadg_util.setValueIfSet(data.title,album_title_input,data.title != false); }; return f; default: // that should actually never happen return function(data) {}; } } }; var yadg_templates = { _templates : {}, _template_urls : {}, getTemplate : function(id, callback) { if (id in this._templates) { callback(this._templates[id]); } else if (id in this._template_urls) { var request = new requester(this._template_urls[id], 'GET', function(template) { yadg_templates.addTemplate(template); callback(template); }, null, yadg_templates.errorTemplate); request.send(); } else { this.errorTemplate(); } }, addTemplate : function(template) { this._templates[template.id] = template; }, addTemplateUrl : function(id, url) { this._template_urls[id] = url; }, errorTemplate : function() { yadg.printError("Could not get template. Please choose another one.", true); } }; var yadg_renderer = { _last_data : null, _last_template_id : null, render : function(template_id, data, callback, error_callback) { this._last_data = data; var new_template = this._last_template_id !== template_id; this._last_template_id = template_id; yadg_templates.getTemplate(template_id, function(template) { // the new template might have different dependencies, so initialize Swig with those if (new_template) { yadg_sandbox.resetSandbox(); yadg_sandbox.initializeSwig(template.dependencies); } yadg_sandbox.renderTemplate(template.code, data, callback, error_callback); }); }, renderCached : function(template_id, callback, error_callback) { if (this.hasCached()) { this.render(template_id, this._last_data, callback, error_callback); } }, hasCached : function() { return this._last_data !== null; }, clearCached : function() { this._last_data = null; } }; var yadg = { yadgHost : "https://yadg.cc", baseURI : "/api/v2/", standardError : "Sorry, an error occured. Please try again. If this error persists the user script might need updating.", authenticationError : "Your API token is invalid. 
Please provide a valid API token or remove the current one.", lastStateError : false, isBusy : false, init : function() { this.scraperSelect = document.getElementById('yadg_scraper'); this.formatSelect = document.getElementById('yadg_format'); this.input = document.getElementById('yadg_input'); this.responseDiv = document.getElementById('yadg_response'); this.button = document.getElementById('yadg_submit'); }, getBaseURL : function() { return this.yadgHost + this.baseURI; }, getScraperList : function(callback) { var url = this.getBaseURL() + "scrapers/"; var request = new requester(url, 'GET', callback); request.send(); }, getFormatsList : function(callback) { var url = this.getBaseURL() + "templates/"; this.getTemplates(url, [], callback); }, getTemplates : function(url, templates, callback) { var request = new requester(url, 'GET', function(data) { for (var i = 0; i < data.results.length; i++) { templates.push(data.results[i]); } if (data.next !== null) { yadg.getTemplates(data.next, templates, callback); } else { callback(templates); } }); request.send(); }, makeRequest : function(params) { if (this.isBusy) return; var data; if (params) { data = params; } else { data = { scraper: this.scraperSelect.options[this.scraperSelect.selectedIndex].value, input: this.input.value }; } var url = this.getBaseURL() + 'query/'; if (data.input !== '') { var request = new requester(url, 'POST', function(result) { yadg.getResult(result.url); }, data); this.busyStart(); request.send(); } }, getResult : function(result_url) { var request = new requester(result_url, 'GET', function(response) { if (response.status == "done") { if (response.data.type == 'ReleaseResult') { var template_id = yadg.formatSelect.options[yadg.formatSelect.selectedIndex].value; yadg_renderer.render(template_id, response, factory.setDescriptionBoxValue, factory.setDescriptionBoxValue); if (yadg.lastStateError == true) { yadg.responseDiv.innerHTML = ""; yadg.lastStateError = false; } var fillFunc = factory.getFormFillFunction(); fillFunc(response.data); } else if (response.data.type == 'ListResult') { var ul = document.createElement('ul'); ul.id = "yadg_release_list"; var release_list = response.data.items; for (var i = 0; i < release_list.length;i++) { var name = release_list[i]['name'], info = release_list[i]['info'], query_params = release_list[i]['queryParams'], release_url = release_list[i]['url']; var li = document.createElement('li'), a = document.createElement('a'); a.textContent = name; a.params = query_params; a.href = release_url; a.addEventListener('click',function(e) { e.preventDefault(); yadg.makeRequest(this.params);},false); li.appendChild(a); li.appendChild(document.createElement('br')); li.appendChild(document.createTextNode(info)); ul.appendChild(li); } if (ul.childNodes.length != 0) { yadg.responseDiv.innerHTML = ""; yadg.responseDiv.appendChild(ul); yadg.lastStateError = false; // we got a ListResult so clear the last ReleaseResult from the render cache yadg_renderer.clearCached(); } else { yadg.printError('Sorry, there were no matches.'); } } else if (response.data.type == 'NotFoundResult') { yadg.printError('I could not find the release with the given ID. You may want to try again with another one.'); } else { yadg.printError('Something weird happened. 
Please try again'); } yadg.busyStop(); } else if (response.status == 'failed') { yadg.failed_callback(); } else { var delay = function() { yadg.getResult(response.url); }; window.setTimeout(delay, 1000); } }); request.send(); }, printError : function(message, template_error) { this.responseDiv.innerHTML = ""; this.responseDiv.appendChild(document.createTextNode(message)); if (!template_error) { this.lastStateError = true; // there was a non template related error, so for consistencies sake clear the last ReleaseResult from the // render cache yadg_renderer.clearCached(); } }, failed_callback : function() { yadg.printError(yadg.standardError); yadg.busyStop(); }, failed_authentication_callback : function() { yadg.printError(yadg.authenticationError); yadg.busyStop(); }, busyStart : function() { this.isBusy = true; this.button.setAttribute('disabled',true); this.button.value = "Please wait..."; this.input.setAttribute('disabled',true); this.scraperSelect.setAttribute('disabled',true); this.formatSelect.setAttribute('disabled',true); }, busyStop : function() { this.button.removeAttribute('disabled'); this.button.value = "Fetch"; this.input.removeAttribute('disabled'); this.scraperSelect.removeAttribute('disabled'); this.formatSelect.removeAttribute('disabled'); this.isBusy = false; }, prepareRawResponse : function(rawData) { var result = {}; result.artists = false; result.year = false; result.title = false; result.label = false; result.catalog = false; result.genre = false; result.style = false; result.tags = false; result.is_various = false; result.flat_artist_string = false; if (rawData.artists.length > 0) { result.artists = {}; for (var i = 0; i < rawData.artists.length; i++) { var artist = rawData.artists[i]; if (!artist["isVarious"]) { result.artists[artist["name"]] = artist["types"]; } else { result.is_various = true; } } } if (rawData.discs.length > 0) { for (var k = 0; k < rawData.discs.length; k++) { var disc = rawData.discs[k]; for (var l = 0; l < disc["tracks"].length; l++) { var track = disc["tracks"][l]; for (var m = 0; m < track["artists"].length; m++) { var name = track["artists"][m]["name"], type = track["artists"][m]["types"]; var newTypes = null; if (name in result.artists) { newTypes = result.artists[name].concat(type); // deduplicate new types array for(var i = 0; i < newTypes.length; ++i) { for(var j = i+1; j < newTypes.length; ++j) { if(newTypes[i] === newTypes[j]) newTypes.splice(j--, 1); } } } else { newTypes = type; } result.artists[name] = newTypes; } } } } for (var i = 0; i < rawData['releaseEvents'].length; i++) { var event = rawData['releaseEvents'][i]; if (event.date) { result.year = event.date.match(/\d{4}/)[0]; if (result.year.length != 4) { result.year = false; } else { break; } } } if (rawData.title) { result.title = rawData.title; } if (rawData.labelIds.length > 0) { var labelId = rawData['labelIds'][0]; if (labelId.label) { result.label = labelId.label; } if (labelId.catalogueNrs.length > 0) { result.catalog = labelId.catalogueNrs[0]; } } if (rawData.genres.length > 0) { result.genre = rawData.genres; } if (rawData.styles.length > 0) { result.style = rawData.styles; } if (result.genre != false && result.style != false) { result.tags = rawData.genres.concat(rawData.styles); } else if (result.genre != false) { result.tags = rawData.genres; } else if (result.style != false) { result.tags = rawData.styles; } if (result.tags != false) { result.tag_string = ""; result.tag_string_nodots = ""; for (var i = 0; i < result.tags.length; i++) { result.tag_string = 
result.tag_string + result.tags[i].replace(/\s+/g,'.'); result.tag_string_nodots = result.tag_string_nodots + result.tags[i].replace(/\s+/g,' '); if (i != result.tags.length-1) { result.tag_string = result.tag_string + ', '; result.tag_string_nodots = result.tag_string_nodots + ', '; } } } if (result.artists != false) { // count the artists result.artists_length = 0; result.artist_keys = []; result.effective_artist_count = 0; for (var i in result.artists) { if (result.artists.hasOwnProperty(i)) { result.artists_length++; result.artist_keys.push(i); result.effective_artist_count += result.artists[i].length; } } } if (result.artists_length == 0) { result.artists = false; } else { // create a flat string of all the main artists var artist_string = ""; for (var i = 0; i < result.artists_length; i++) { if (result.artists[result.artist_keys[i]].indexOf("main") != -1) { if (artist_string != "" && i < result.artists_length - 2) { artist_string = artist_string + ", "; } else if (artist_string != "" && i < result.artists_length - 1) { artist_string = artist_string + " & "; } artist_string = artist_string + result.artist_keys[i]; } } result.flat_artist_string = artist_string; } return result; } }; yadg_sandbox.init(function() { if (factory.init()) { // returns true if we run on a valid location yadg.init(); } });
Shorten name
pth_yadg.user.js
Shorten name
<ide><path>th_yadg.user.js
<ide> // ==UserScript==
<ide> // @id pth-yadg
<del>// @name passtheheadphones.me - YADG
<add>// @name PTH YADG
<ide> // @description This script provides integration with online description generator YADG (http://yadg.cc) - Credit to Slack06
<ide> // @license https://github.com/SavageCore/yadg-pth-userscript/blob/master/LICENSE
<ide> // @version 1.3.10
JavaScript
mit
15b383afbb00992425732b1c3704f4a32967ee35
0
mure-apps/mure-library
/* eslint no-useless-escape: 0 */ import 'babel-polyfill'; import * as d3 from 'd3'; import datalib from 'datalib'; import md5 from 'md5'; import * as jsonpath from 'jsonpath'; import PouchDB from 'pouchdb'; import { Model } from 'uki'; import appList from './appList.json'; import mureInteractivityRunnerText from './mureInteractivityRunner.js'; class Mure extends Model { constructor () { super(); this.appList = appList; // Check if we're even being used in the browser (mostly useful for getting // access to the applist in all-apps-dev-server.js) if (typeof document === 'undefined' || typeof window === 'undefined') { return; } // Enumerations... this.VALID_EVENTS = { fileListChange: {}, fileChange: {}, domChange: {}, metadataChange: {}, error: {} }; this.CONTENT_FORMATS = { exclude: 0, blob: 1, dom: 2, base64: 3 }; this.SIGNALS = { cancelled: {} }; this.ENCODING_TYPES = { constant: 0 }; // The namespace string for our custom XML this.NSString = 'http://mure-apps.github.io'; d3.namespaces.mure = this.NSString; // Funky stuff to figure out if we're debugging (if that's the case, we want to use // localhost instead of the github link for all links) let windowTitle = document.getElementsByTagName('title')[0]; windowTitle = windowTitle ? windowTitle.textContent : ''; this.debugMode = window.location.hostname === 'localhost' && windowTitle.startsWith('Mure'); // Figure out which app we are (or null if the mure library is being used somewhere else) this.currentApp = window.location.pathname.replace(/\//g, ''); if (!this.appList[this.currentApp]) { this.currentApp = null; } // Create / load the local database of files this.lastFile = null; this.db = this.getOrInitDb(); // default error handling (apps can listen for / display error messages in addition to this): this.on('error', errorMessage => { console.warn(errorMessage); }); this.catchDbError = errorObj => { this.trigger('error', 'Unexpected error reading PouchDB: ' + errorObj.message + '\n' + errorObj.stack); }; // in the absence of a custom dialogs, just use window.alert, window.confirm and window.prompt: this.alert = (message) => { return new Promise((resolve, reject) => { window.alert(message); resolve(true); }); }; this.confirm = (message) => { return new Promise((resolve, reject) => { resolve(window.confirm(message)); }); }; this.prompt = (message, defaultValue) => { return new Promise((resolve, reject) => { resolve(window.prompt(message, defaultValue)); }); }; } on (eventName, callback) { if (!this.VALID_EVENTS[eventName]) { throw new Error('Unknown event name: ' + eventName); } else { super.on(eventName, callback); } } customizeAlertDialog (showDialogFunction) { this.alert = showDialogFunction; } customizeConfirmDialog (showDialogFunction) { this.confirm = showDialogFunction; } customizePromptDialog (showDialogFunction) { this.prompt = showDialogFunction; } openApp (appName, newTab) { if (newTab) { window.open('/' + appName, '_blank'); } else { window.location.pathname = '/' + appName; } } getOrInitDb () { let db = new PouchDB('mure'); (async () => { this.lastFile = null; try { let prefs = await db.get('userPrefs'); if (prefs.currentFile) { this.lastFile = await this.getFile(prefs.currentFile); } } catch (errorObj) { if (errorObj.message === 'missing') { db.put({ _id: 'userPrefs', currentFile: null }); } else { this.catchDbError(errorObj); } } })(); db.changes({ since: 'now', live: true, include_docs: true }).on('change', change => { let fileChanged; let fileListChanged; let domChanged; let metadataChanged; if (change.deleted) { if 
(change.doc._id !== this.lastFile._id) { // Weird corner case: if we just deleted a file that wasn't the current one, // we won't ever get a change event on the userPrefs object; in that case, // we need to trigger the fileListChanged event immediately (async () => { let fileList = await this.getFileList(); this.trigger('fileListChange', fileList); })().catch(this.catchDbError); } // Whether or not the deleted file was the currently open one, don't // trigger any other events; we want events to fire in context of the // new stuff getting loaded below return; } let currentFile; if (change.doc._id === 'userPrefs') { // We just changed the currently open file; trigger all the events fileChanged = fileListChanged = domChanged = metadataChanged = true; currentFile = this.lastFile; } else { if (this.lastFile === null || this.lastFile._id !== change.doc._id) { // The file itself is changing; DON'T actually trigger any of the events // because userPrefs is about to be changed as well... and things listening // to changes that care about checking userPrefs will want to do it with // its value updated fileChanged = fileListChanged = domChanged = metadataChanged = false; } else { fileChanged = false; domChanged = this.lastFile._attachments[this.lastFile._id].digest !== change.doc._attachments[change.doc._id].digest; metadataChanged = this.lastFile.metadataDigest !== change.doc.metadataDigest; } this.lastFile = currentFile = change.doc; } if (fileChanged) { this.trigger('fileChange', currentFile); } if (fileListChanged) { (async () => { let fileList = await this.getFileList(); this.trigger('fileListChange', fileList); })().catch(this.catchDbError); } if (domChanged) { (async () => { let blob = currentFile ? await this.getFileAsBlob(currentFile._id) : null; this.trigger('domChange', blob); })(); } if (metadataChanged) { this.trigger('metadataChange', currentFile ? currentFile.metadata : null); } }).on('error', errorObj => { this.catchDbError(errorObj); }); return db; } async setCurrentFile (filename) { return this.db.get('userPrefs').then(prefs => { prefs.currentFile = filename; return this.db.put(prefs); }).catch(this.catchDbError); } async getFile (filename, contentFormat) { if (!filename) { filename = await this.getCurrentFilename(); } let pouchdbOptions = {}; if (contentFormat !== this.CONTENT_FORMATS.exclude) { pouchdbOptions.attachments = true; if (contentFormat === this.CONTENT_FORMATS.blob) { pouchdbOptions.binary = true; } } if (filename !== null) { return this.db.get(filename, pouchdbOptions || {}) .then(fileObj => { if (contentFormat === this.CONTENT_FORMATS.dom) { let xmlText = window.atob(fileObj._attachments[fileObj._id].data); let dom = new window.DOMParser().parseFromString(xmlText, 'image/svg+xml'); fileObj._attachments[fileObj._id].dom = dom; } return fileObj; }); } else { return Promise.resolve(null); } } async saveFile (options) { try { let existingDoc; if (!options.blobOrBase64string) { existingDoc = await this.getFile(options.filename, this.CONTENT_FORMATS.exclude); } else { existingDoc = await this.getFile(options.filename, this.CONTENT_FORMATS.blob); existingDoc._attachments[options.filename].data = options.blobOrBase64string; if ((!options.metadata || Object.keys(options.metadata).length === 0) && Object.keys(existingDoc.metadata) > 0) { let userConfirmation = await this.confirm( 'It appears that the file you\'re uploading has lost its Mure metadata. 
' + 'This is fairly common when you\'ve edited it with an external program.\n\n' + 'Restore the most recent metadata?'); if (!userConfirmation) { existingDoc.metadata = {}; existingDoc.metadataDigest = md5('{}'); } } } if (options.metadata) { existingDoc.metadata = options.metadata; existingDoc.metadataDigest = md5(JSON.stringify(options.metadata)); } return this.db.put(existingDoc); } catch (errorObj) { if (errorObj.message === 'missing') { // The file doesn't exist yet... let newDoc = { _id: options.filename, _attachments: {}, metadata: options.metadata || {}, metadataDigest: options.metadata ? md5(JSON.stringify(options.metadata)) : md5('{}') }; if (!options.blobOrBase64string) { this.trigger('error', 'Attempted to save a file without contents!'); } newDoc._attachments[options.filename] = { content_type: 'image/svg+xml', data: options.blobOrBase64string }; return this.db.put(newDoc); } else { this.catchDbError(errorObj); return Promise.reject(errorObj); } } } async getMetadata (filename) { let currentFile = await this.getFile(filename, this.CONTENT_FORMATS.exclude); return currentFile !== null ? currentFile.metadata : null; } async getCurrentFilename () { return this.db.get('userPrefs').then(prefs => { return prefs.currentFile; }); } async getFileList () { return this.db.allDocs() .then(response => { let result = []; response.rows.forEach(d => { if (d.id !== 'userPrefs') { result.push(d.id); } }); return result; }).catch(this.catchDbError); } async getFileRevisions () { return this.db.allDocs() .then(response => { let result = {}; response.rows.forEach(d => { if (d.id !== 'userPrefs') { result[d.id] = d.value.rev; } }); return result; }).catch(this.catchDbError); } async readFile (reader, fileObj) { return new Promise((resolve, reject) => { reader.onloadend = xmlText => { resolve(xmlText.target.result); }; reader.onerror = error => { reject(error); }; reader.onabort = () => { reject(this.SIGNALS.cancelled); }; reader.readAsText(fileObj); }); } async validateFileName (originalName, takenNames, abortFunction) { // Ask multiple times if the user happens to enter another filename that already exists let filename = originalName; while (takenNames[filename]) { filename = await this.prompt( filename + ' already exists. Pick a new name, or leave it the same to overwrite:', filename); if (filename === null) { if (abortFunction) { abortFunction(); } return Promise.reject(this.SIGNALS.cancelled); } else if (filename === '') { filename = await this.prompt('You must enter a file name (or click cancel to cancel the upload)'); } else if (filename === originalName) { // They left it the same... overwrite! return filename; } } return filename; } inferParser (fileObj) { let ext = fileObj.type.split('/')[1]; if (ext === 'csv') { return (contents) => { return datalib.read(contents, {type: 'csv', parse: 'auto'}); }; } else if (ext === 'tsv') { return (contents) => { return datalib.read(contents, {type: 'tsv', parse: 'auto'}); }; } else if (ext === 'dsv') { return (contents) => { return datalib.read(contents, {type: 'dsv', parse: 'auto'}); }; } else if (ext === 'json') { // TODO: attempt to auto-discover topojson or treejson? 
return (contents) => { return datalib.read(contents, {type: 'json', parse: 'auto'}); }; } else { return null; } } async uploadDataset (fileObj) { let parser = this.inferParser(fileObj); if (!parser) { let errorObj = new Error('Unknown data file type: ' + fileObj.type); this.trigger('error', errorObj); return Promise.reject(errorObj); } let metadata = await this.getMetadata(); if (metadata === null) { let errorObj = new Error('Can\'t embed a data file without an SVG file already open'); this.trigger('error', errorObj); return Promise.reject(errorObj); } metadata.datasets = metadata.datasets || {}; let reader = new window.FileReader(); let dataFileName = await this.validateFileName(fileObj.name, metadata.datasets, reader.abort); let fileText = await this.readFile(reader, fileObj); metadata.datasets[dataFileName] = parser(fileText); return this.saveFile({ metadata }); } async uploadSvg (fileObj) { let reader = new window.FileReader(); let contentsPromise = this.readFile(reader, fileObj) .then(xmlText => { let dom = new window.DOMParser().parseFromString(xmlText, 'image/svg+xml'); let contents = { metadata: this.extractMetadata(dom) }; contents.base64data = window.btoa(new window.XMLSerializer().serializeToString(dom)); return contents; }); let filenamePromise = this.getFileRevisions() .catch(this.catchDbError) .then(revisionDict => { return this.validateFileName(fileObj.name, revisionDict, reader.abort); }); return Promise.all([filenamePromise, contentsPromise]).then(([filename, contents]) => { return this.saveFile({ filename, blobOrBase64string: contents.base64data, metadata: contents.metadata }).then(() => { return this.setCurrentFile(filename); }); }).catch((errList) => { if (errList[0] !== this.SIGNALS.cancelled || errList[1] !== this.SIGNALS.cancelled) { // cancelling is not a problem; only reject if something else happened return Promise.reject(errList); } }); } async deleteSvg (filename) { let userConfirmation = await this.confirm('Are you sure you want to delete ' + filename + '?'); if (userConfirmation) { let currentFile = await this.getFile(filename, this.CONTENT_FORMATS.exclude); return this.db.remove(currentFile._id, currentFile._rev) .then(removeResponse => { if (this.lastFile && filename === this.lastFile._id) { return this.setCurrentFile(null).then(() => removeResponse); } return removeResponse; }); } else { return Promise.resolve(false); } } extractMetadata (dom) { let self = this; let metadata = {}; let d3dom = d3.select(dom.rootElement); // Extract the container for our metadata, if it exists let root = d3dom.select('#mure'); if (root.size() === 0) { return metadata; } let nsElement = root.select('mure'); if (nsElement.size() === 0) { return metadata; } // Any libraries? nsElement.selectAll('library').each(function (d) { if (!metadata.libraries) { metadata.libraries = []; } metadata.libraries.push(d3.select(this).attr('src')); }); // Any scripts? nsElement.selectAll('script').each(function (d) { let el = d3.select(this); let script = { text: self.extractCDATA(el.text()) }; let id = el.attr('id'); if (id) { if (id === 'mureInteractivityRunner') { // Don't store our interactivity runner script return; } script.id = id; } if (!metadata.scripts) { metadata.scripts = []; } metadata.scripts.push(script); }); // Any datasets? nsElement.selectAll('datasets').each(function (d) { let el = d3.select(this); if (!metadata.datasets) { metadata.datasets = {}; } metadata.datasets[el.attr('name')] = JSON.parse(self.extractCDATA(el.text())); }); // Any data bindings? 
nsElement.selectAll('binding').each(function (d) { let el = d3.select(this); let binding = { id: el.attr('id'), dataRoot: el.attr('dataroot'), svgRoot: el.attr('svgroot'), keyFunction: JSON.parse(self.extractCDATA(el.text())) }; if (!metadata.bindings) { metadata.bindings = {}; } metadata.bindings[binding.id] = binding; }); // Any encodings? nsElement.selectAll('encoding').each(function (d) { let el = d3.select(this); let encoding = { id: el.attr('id'), bindingId: el.attr('for'), spec: JSON.parse(self.extractCDATA(el.text())) }; if (!metadata.encodings) { metadata.encodings = {}; } metadata.encodings[encoding.id] = encoding; }); return metadata; } extractCDATA (str) { return str.replace(/(<!\[CDATA\[)/g, '').replace(/]]>/g, ''); } getEmptyBinding (metadata, add) { let id = 1; /* eslint-disable no-unmodified-loop-condition */ while (metadata.bindings && metadata.bindings['Binding Set ' + id]) { id++; } /* eslint-enable no-unmodified-loop-condition */ let newBinding = { id: 'Binding Set ' + id, svgRoot: ':root', dataRoot: metadata.datasets && Object.keys(metadata.datasets).length > 0 ? '$["' + Object.keys(metadata.datasets)[0] + '"]' : '', keyFunction: { expression: '(d, e) => d.key === e.index' } }; if (add) { if (!metadata.bindings) { metadata.bindings = {}; } metadata.bindings[newBinding.id] = newBinding; } return newBinding; } getEmptyEncoding (metadata, add) { let id = 1; /* eslint-disable no-unmodified-loop-condition */ while (metadata.encodings && metadata.encodings['Encoding ' + id]) { id++; } /* eslint-enable no-unmodified-loop-condition */ let newEncoding = { id: 'Encoding ' + id, bindingId: '', spec: {} }; if (add) { if (!metadata.encodings) { metadata.encodings = {}; } metadata.encodings[newEncoding.id] = newEncoding; } return newEncoding; } embedMetadata (dom, metadata) { let d3dom = d3.select(dom.rootElement); // Top: need a metadata tag let root = d3dom.selectAll('#mure').data([0]); root.exit().remove(); root = root.enter().append('metadata').attr('id', 'mure').merge(root); // Next down: a tag to define the namespace let nsElement = root.selectAll('mure').data([0]); nsElement.exit().remove(); nsElement = nsElement.enter().append('mure').attr('xmlns', this.NSString).merge(nsElement); // Okay, we're in our custom namespace... 
let's figure out the libraries let libraryList = metadata.libraries || []; let libraries = nsElement.selectAll('library').data(libraryList); libraries.exit().remove(); libraries = libraries.enter().append('library').merge(libraries); libraries.attr('src', d => d); // Let's deal with any user scripts let scriptList = metadata.scripts || []; let scripts = nsElement.selectAll('script').data(scriptList); scripts.exit().remove(); let scriptsEnter = scripts.enter().append('script'); scripts = scriptsEnter.merge(scripts); scripts.attr('id', d => d.id || null); scripts.each(function (d) { this.innerHTML = '<![CDATA[' + d.text + ']]>'; }); // Remove mureInteractivityRunner by default to ensure it always comes after the // metadata tag (of course, only bother adding it if we have any libraries or scripts) d3dom.select('#mureInteractivityRunner').remove(); if (libraryList.length > 0 || scriptList.length > 0) { d3dom.append('script') .attr('id', 'mureInteractivityRunner') .attr('type', 'text/javascript') .text('<![CDATA[' + mureInteractivityRunnerText + ']]'); } // We always store datasets as JSON let datasets = nsElement.selectAll('dataset').data(d3.entries(metadata.datasets || {})); datasets.exit().remove(); let datasetsEnter = datasets.enter().append('dataset'); datasets = datasetsEnter.merge(datasets); datasets.attr('name', d => d.key) .html(d => '<![CDATA[' + JSON.stringify(d.value) + ']]>'); // Store data bindings let bindings = nsElement.selectAll('binding').data(d3.values(metadata.bindings || {})); bindings.exit().remove(); let bindingsEnter = bindings.enter().append('binding'); bindings = bindingsEnter.merge(bindings); bindings .attr('id', d => d.id) .attr('dataroot', d => d.dataRoot) .attr('svgroot', d => d.svgRoot) .html(d => '<![CDATA[' + JSON.stringify(d.keyFunction) + ']]>'); // Store encoding metadata let encodings = nsElement.selectAll('encoding').data(d3.values(metadata.encodings || {})); encodings.exit().remove(); let encodingsEnter = encodings.enter().append('encoding'); encodings = encodingsEnter.merge(encodings); encodings .attr('id', d => d.id) .attr('bindingid', d => d.bindingId) .html(d => '<![CDATA[' + JSON.stringify(d.spec) + ']]>'); return dom; } async downloadSvg (filename) { let fileEntry = await this.getFile(filename, this.CONTENT_FORMATS.dom); if (!fileEntry) { throw new Error('Can\'t download non-existent file: ' + filename); } let dom = this.embedMetadata(fileEntry._attachments[fileEntry._id].dom, fileEntry.metadata); let xmlText = new window.XMLSerializer().serializeToString(dom) .replace(/&lt;!\[CDATA\[/g, '<!\[CDATA\[').replace(/]]&gt;/g, ']]>'); // create a fake link to initiate the download let a = document.createElement('a'); a.style = 'display:none'; let url = window.URL.createObjectURL(new window.Blob([xmlText], { type: 'image/svg+xml' })); a.href = url; a.download = filename; document.body.appendChild(a); a.click(); window.URL.revokeObjectURL(url); a.parentNode.removeChild(a); } matchDataPaths (path1, path2, metadata) { if (!metadata || !metadata.datasets || !path1 || !path2) { return false; } let result1 = jsonpath.query(metadata.datasets, path1); let result2 = jsonpath.query(metadata.datasets, path2); if (result1.length !== 1 || result2.length !== 1) { return false; } return result1[0] === result2[0]; } matchDomSelectors (selector1, selector2, dom) { if (!selector1 || !selector2) { return false; } let result1 = dom.querySelector(selector1); let result2 = dom.querySelector(selector2); return result1 === result2; } getMatches (metadata, dom) { let mapping = 
[]; if (metadata && metadata.bindings && metadata.datasets && dom) { d3.values(metadata.bindings).forEach(binding => { mapping.push(...this.getMatchesForBinding(binding, metadata, dom)); }); } return mapping; } getMatchesForBinding (binding, metadata, dom) { if (!binding.dataRoot || !binding.svgRoot || !binding.keyFunction) { return []; } if (binding.keyFunction.customMapping) { return binding.keyFunction.customMapping; } /* eslint-disable no-eval */ let expression = (0, eval)(binding.keyFunction.expression); /* eslint-enable no-eval */ // Need to evaluate the expression for each n^2 possible pairing, and assign // mapping the first time the expression is true (but not after! // mapping can only be one-to-one!) let dataRoot = jsonpath.query(metadata.datasets, binding.dataRoot)[0]; let dataEntries; if (dataRoot instanceof Array) { dataEntries = dataRoot.map((d, i) => { return { key: i, value: d }; }); } else if (typeof dataRoot === 'object') { dataEntries = d3.entries(dataRoot); } else { return; // a leaf was picked as a root... no mapping possible } let svgRoot = dom.querySelector(binding.svgRoot); let svgItems = Array.from(svgRoot.children); let mapping = { links: [], svgLookup: {}, dataLookup: {} }; dataEntries.forEach(dataEntry => { for (let itemIndex = 0; itemIndex < svgItems.length; itemIndex += 1) { if (mapping.svgLookup[itemIndex] !== undefined) { // this svg element has already been matched with a different dataEntry continue; } let svgEntry = { index: itemIndex, element: svgItems[itemIndex] }; let expressionResult = null; try { expressionResult = expression(dataEntry, svgEntry); } catch (errorObj) { // todo: add interface helpers for debugging the expression throw errorObj; } if (expressionResult === true) { mapping.svgLookup[svgEntry.index] = mapping.links.length; mapping.dataLookup[dataEntry.key] = mapping.links.length; mapping.links.push({ dataEntry, svgEntry }); break; } else if (expressionResult !== false) { throw new Error('The expression must evaluate to true or false'); } } }); return mapping; } inferAllEncodings (binding, metadata, dom) { let mapping = this.getMatchesForBinding(binding, metadata, dom); // Trash all previous encodings associated with this binding if (metadata.encodings) { Object.keys(metadata.encodings).forEach(encodingId => { if (metadata.encodings[encodingId].bindingId === binding.id) { delete metadata.encodings[encodingId]; } }); } else { metadata.encodings = {}; } // Create / get cached distribution of values let dataDistributions = {}; let svgDistributions = {}; mapping.links.forEach(link => { Object.keys(link.dataEntry.value).forEach(attr => { let value = link.dataEntry.value[attr]; if (typeof value === 'string' || typeof value === 'number') { dataDistributions[attr] = dataDistributions[attr] || {}; dataDistributions[attr][value] = (dataDistributions[attr][value] || 0) + 1; } }); svgDistributions._tagName = svgDistributions._tagName || {}; svgDistributions._tagName[link.svgEntry.element.tagName] = (svgDistributions._tagName[link.svgEntry.element.tagName] || 0) + 1; Array.from(link.svgEntry.element.attributes).forEach(attrObj => { let attr = attrObj.name; let value = link.svgEntry.element.getAttribute(attr); if (typeof value === 'string' || typeof value === 'number') { svgDistributions[attr] = svgDistributions[attr] || {}; svgDistributions[attr][value] = (svgDistributions[attr][value] || 0) + 1; } }); }); // Generate all potential svg constant rules // TODO: infer data constants as well if we ever get around to // supporting the data cleaning use case 
Object.keys(svgDistributions).forEach(attr => { let encoding = this.getEmptyEncoding(metadata, true); encoding.bindingId = binding.id; encoding.spec.type = this.ENCODING_TYPES.constant; encoding.spec.attribute = attr; // Figure out the bin with the highest count, while calculating the error let value = null; let maxBinCount = 0; let totalCount = 0; Object.keys(svgDistributions[attr]).forEach(binLabel => { let binCount = svgDistributions[attr][binLabel]; totalCount += binCount; if (binCount > maxBinCount) { value = binLabel; maxBinCount = binCount; } }); encoding.spec.value = value; encoding.spec.error = (totalCount - maxBinCount) / totalCount; // Don't initially enable constants unless they're 100% accurate encoding.spec.enabled = encoding.spec.error === 0; }); // TODO: generate linear, log, other model rules this.saveFile({ metadata }); } } let mure = new Mure(); export default mure;
src/mure.js
/* eslint no-useless-escape: 0 */ import 'babel-polyfill'; import * as d3 from 'd3'; import datalib from 'datalib'; import md5 from 'md5'; import * as jsonpath from 'jsonpath'; import PouchDB from 'pouchdb'; import { Model } from 'uki'; import appList from './appList.json'; import mureInteractivityRunnerText from './mureInteractivityRunner.js'; class Mure extends Model { constructor () { super(); this.appList = appList; // Check if we're even being used in the browser (mostly useful for getting // access to the applist in all-apps-dev-server.js) if (typeof document === 'undefined' || typeof window === 'undefined') { return; } // Enumerations... this.VALID_EVENTS = { fileListChange: {}, fileChange: {}, domChange: {}, metadataChange: {}, error: {} }; this.CONTENT_FORMATS = { exclude: 0, blob: 1, dom: 2, base64: 3 }; this.SIGNALS = { cancelled: {} }; // The namespace string for our custom XML this.NSString = 'http://mure-apps.github.io'; d3.namespaces.mure = this.NSString; // Funky stuff to figure out if we're debugging (if that's the case, we want to use // localhost instead of the github link for all links) let windowTitle = document.getElementsByTagName('title')[0]; windowTitle = windowTitle ? windowTitle.textContent : ''; this.debugMode = window.location.hostname === 'localhost' && windowTitle.startsWith('Mure'); // Figure out which app we are (or null if the mure library is being used somewhere else) this.currentApp = window.location.pathname.replace(/\//g, ''); if (!this.appList[this.currentApp]) { this.currentApp = null; } // Create / load the local database of files this.lastFile = null; this.db = this.getOrInitDb(); // default error handling (apps can listen for / display error messages in addition to this): this.on('error', errorMessage => { console.warn(errorMessage); }); this.catchDbError = errorObj => { this.trigger('error', 'Unexpected error reading PouchDB: ' + errorObj.message + '\n' + errorObj.stack); }; // in the absence of a custom dialogs, just use window.alert, window.confirm and window.prompt: this.alert = (message) => { return new Promise((resolve, reject) => { window.alert(message); resolve(true); }); }; this.confirm = (message) => { return new Promise((resolve, reject) => { resolve(window.confirm(message)); }); }; this.prompt = (message, defaultValue) => { return new Promise((resolve, reject) => { resolve(window.prompt(message, defaultValue)); }); }; } on (eventName, callback) { if (!this.VALID_EVENTS[eventName]) { throw new Error('Unknown event name: ' + eventName); } else { super.on(eventName, callback); } } customizeAlertDialog (showDialogFunction) { this.alert = showDialogFunction; } customizeConfirmDialog (showDialogFunction) { this.confirm = showDialogFunction; } customizePromptDialog (showDialogFunction) { this.prompt = showDialogFunction; } openApp (appName, newTab) { if (newTab) { window.open('/' + appName, '_blank'); } else { window.location.pathname = '/' + appName; } } getOrInitDb () { let db = new PouchDB('mure'); (async () => { this.lastFile = null; try { let prefs = await db.get('userPrefs'); if (prefs.currentFile) { this.lastFile = await this.getFile(prefs.currentFile); } } catch (errorObj) { if (errorObj.message === 'missing') { db.put({ _id: 'userPrefs', currentFile: null }); } else { this.catchDbError(errorObj); } } })(); db.changes({ since: 'now', live: true, include_docs: true }).on('change', change => { let fileChanged; let fileListChanged; let domChanged; let metadataChanged; if (change.deleted) { if (change.doc._id !== this.lastFile._id) { // 
Weird corner case: if we just deleted a file that wasn't the current one, // we won't ever get a change event on the userPrefs object; in that case, // we need to trigger the fileListChanged event immediately (async () => { let fileList = await this.getFileList(); this.trigger('fileListChange', fileList); })().catch(this.catchDbError); } // Whether or not the deleted file was the currently open one, don't // trigger any other events; we want events to fire in context of the // new stuff getting loaded below return; } let currentFile; if (change.doc._id === 'userPrefs') { // We just changed the currently open file; trigger all the events fileChanged = fileListChanged = domChanged = metadataChanged = true; currentFile = this.lastFile; } else { if (this.lastFile === null || this.lastFile._id !== change.doc._id) { // The file itself is changing; DON'T actually trigger any of the events // because userPrefs is about to be changed as well... and things listening // to changes that care about checking userPrefs will want to do it with // its value updated fileChanged = fileListChanged = domChanged = metadataChanged = false; } else { fileChanged = false; domChanged = this.lastFile._attachments[this.lastFile._id].digest !== change.doc._attachments[change.doc._id].digest; metadataChanged = this.lastFile.metadataDigest !== change.doc.metadataDigest; } this.lastFile = currentFile = change.doc; } if (fileChanged) { this.trigger('fileChange', currentFile); } if (fileListChanged) { (async () => { let fileList = await this.getFileList(); this.trigger('fileListChange', fileList); })().catch(this.catchDbError); } if (domChanged) { (async () => { let blob = currentFile ? await this.getFileAsBlob(currentFile._id) : null; this.trigger('domChange', blob); })(); } if (metadataChanged) { this.trigger('metadataChange', currentFile ? currentFile.metadata : null); } }).on('error', errorObj => { this.catchDbError(errorObj); }); return db; } async setCurrentFile (filename) { return this.db.get('userPrefs').then(prefs => { prefs.currentFile = filename; return this.db.put(prefs); }).catch(this.catchDbError); } async getFile (filename, contentFormat) { if (!filename) { filename = await this.getCurrentFilename(); } let pouchdbOptions = {}; if (contentFormat !== this.CONTENT_FORMATS.exclude) { pouchdbOptions.attachments = true; if (contentFormat === this.CONTENT_FORMATS.blob) { pouchdbOptions.binary = true; } } if (filename !== null) { return this.db.get(filename, pouchdbOptions || {}) .then(fileObj => { if (contentFormat === this.CONTENT_FORMATS.dom) { let xmlText = window.atob(fileObj._attachments[fileObj._id].data); let dom = new window.DOMParser().parseFromString(xmlText, 'image/svg+xml'); fileObj._attachments[fileObj._id].dom = dom; } return fileObj; }); } else { return Promise.resolve(null); } } async saveFile (options) { try { let existingDoc; if (!options.blobOrBase64string) { existingDoc = await this.getFile(options.filename, this.CONTENT_FORMATS.exclude); } else { existingDoc = await this.getFile(options.filename, this.CONTENT_FORMATS.blob); existingDoc._attachments[options.filename].data = options.blobOrBase64string; if ((!options.metadata || Object.keys(options.metadata).length === 0) && Object.keys(existingDoc.metadata) > 0) { let userConfirmation = await this.confirm( 'It appears that the file you\'re uploading has lost its Mure metadata. 
' + 'This is fairly common when you\'ve edited it with an external program.\n\n' + 'Restore the most recent metadata?'); if (!userConfirmation) { existingDoc.metadata = {}; existingDoc.metadataDigest = md5('{}'); } } } if (options.metadata) { existingDoc.metadata = options.metadata; existingDoc.metadataDigest = md5(JSON.stringify(options.metadata)); } return this.db.put(existingDoc); } catch (errorObj) { if (errorObj.message === 'missing') { // The file doesn't exist yet... let newDoc = { _id: options.filename, _attachments: {}, metadata: options.metadata || {}, metadataDigest: options.metadata ? md5(JSON.stringify(options.metadata)) : md5('{}') }; if (!options.blobOrBase64string) { this.trigger('error', 'Attempted to save a file without contents!'); } newDoc._attachments[options.filename] = { content_type: 'image/svg+xml', data: options.blobOrBase64string }; return this.db.put(newDoc); } else { this.catchDbError(errorObj); return Promise.reject(errorObj); } } } async getMetadata (filename) { let currentFile = await this.getFile(filename, this.CONTENT_FORMATS.exclude); return currentFile !== null ? currentFile.metadata : null; } async getCurrentFilename () { return this.db.get('userPrefs').then(prefs => { return prefs.currentFile; }); } async getFileList () { return this.db.allDocs() .then(response => { let result = []; response.rows.forEach(d => { if (d.id !== 'userPrefs') { result.push(d.id); } }); return result; }).catch(this.catchDbError); } async getFileRevisions () { return this.db.allDocs() .then(response => { let result = {}; response.rows.forEach(d => { if (d.id !== 'userPrefs') { result[d.id] = d.value.rev; } }); return result; }).catch(this.catchDbError); } async readFile (reader, fileObj) { return new Promise((resolve, reject) => { reader.onloadend = xmlText => { resolve(xmlText.target.result); }; reader.onerror = error => { reject(error); }; reader.onabort = () => { reject(this.SIGNALS.cancelled); }; reader.readAsText(fileObj); }); } async validateFileName (originalName, takenNames, abortFunction) { // Ask multiple times if the user happens to enter another filename that already exists let filename = originalName; while (takenNames[filename]) { filename = await this.prompt( filename + ' already exists. Pick a new name, or leave it the same to overwrite:', filename); if (filename === null) { if (abortFunction) { abortFunction(); } return Promise.reject(this.SIGNALS.cancelled); } else if (filename === '') { filename = await this.prompt('You must enter a file name (or click cancel to cancel the upload)'); } else if (filename === originalName) { // They left it the same... overwrite! return filename; } } return filename; } inferParser (fileObj) { let ext = fileObj.type.split('/')[1]; if (ext === 'csv') { return (contents) => { return datalib.read(contents, {type: 'csv', parse: 'auto'}); }; } else if (ext === 'tsv') { return (contents) => { return datalib.read(contents, {type: 'tsv', parse: 'auto'}); }; } else if (ext === 'dsv') { return (contents) => { return datalib.read(contents, {type: 'dsv', parse: 'auto'}); }; } else if (ext === 'json') { // TODO: attempt to auto-discover topojson or treejson? 
return (contents) => { return datalib.read(contents, {type: 'json', parse: 'auto'}); }; } else { return null; } } async uploadDataset (fileObj) { let parser = this.inferParser(fileObj); if (!parser) { let errorObj = new Error('Unknown data file type: ' + fileObj.type); this.trigger('error', errorObj); return Promise.reject(errorObj); } let metadata = await this.getMetadata(); if (metadata === null) { let errorObj = new Error('Can\'t embed a data file without an SVG file already open'); this.trigger('error', errorObj); return Promise.reject(errorObj); } metadata.datasets = metadata.datasets || {}; let reader = new window.FileReader(); let dataFileName = await this.validateFileName(fileObj.name, metadata.datasets, reader.abort); let fileText = await this.readFile(reader, fileObj); metadata.datasets[dataFileName] = parser(fileText); return this.saveFile({ metadata }); } async uploadSvg (fileObj) { let reader = new window.FileReader(); let contentsPromise = this.readFile(reader, fileObj) .then(xmlText => { let dom = new window.DOMParser().parseFromString(xmlText, 'image/svg+xml'); let contents = { metadata: this.extractMetadata(dom) }; contents.base64data = window.btoa(new window.XMLSerializer().serializeToString(dom)); return contents; }); let filenamePromise = this.getFileRevisions() .catch(this.catchDbError) .then(revisionDict => { return this.validateFileName(fileObj.name, revisionDict, reader.abort); }); return Promise.all([filenamePromise, contentsPromise]).then(([filename, contents]) => { return this.saveFile({ filename, blobOrBase64string: contents.base64data, metadata: contents.metadata }).then(() => { return this.setCurrentFile(filename); }); }).catch((errList) => { if (errList[0] !== this.SIGNALS.cancelled || errList[1] !== this.SIGNALS.cancelled) { // cancelling is not a problem; only reject if something else happened return Promise.reject(errList); } }); } async deleteSvg (filename) { let userConfirmation = await this.confirm('Are you sure you want to delete ' + filename + '?'); if (userConfirmation) { let currentFile = await this.getFile(filename, this.CONTENT_FORMATS.exclude); return this.db.remove(currentFile._id, currentFile._rev) .then(removeResponse => { if (this.lastFile && filename === this.lastFile._id) { return this.setCurrentFile(null).then(() => removeResponse); } return removeResponse; }); } else { return Promise.resolve(false); } } extractMetadata (dom) { let self = this; let metadata = {}; let d3dom = d3.select(dom.rootElement); // Extract the container for our metadata, if it exists let root = d3dom.select('#mure'); if (root.size() === 0) { return metadata; } let nsElement = root.select('mure'); if (nsElement.size() === 0) { return metadata; } // Any libraries? nsElement.selectAll('library').each(function (d) { if (!metadata.libraries) { metadata.libraries = []; } metadata.libraries.push(d3.select(this).attr('src')); }); // Any scripts? nsElement.selectAll('script').each(function (d) { let el = d3.select(this); let script = { text: self.extractCDATA(el.text()) }; let id = el.attr('id'); if (id) { if (id === 'mureInteractivityRunner') { // Don't store our interactivity runner script return; } script.id = id; } if (!metadata.scripts) { metadata.scripts = []; } metadata.scripts.push(script); }); // Any datasets? nsElement.selectAll('datasets').each(function (d) { let el = d3.select(this); if (!metadata.datasets) { metadata.datasets = {}; } metadata.datasets[el.attr('name')] = JSON.parse(self.extractCDATA(el.text())); }); // Any data bindings? 
nsElement.selectAll('binding').each(function (d) { let el = d3.select(this); let binding = { id: el.attr('id'), dataRoot: el.attr('dataroot'), svgRoot: el.attr('svgroot'), keyFunction: JSON.parse(self.extractCDATA(el.text())) }; if (!metadata.bindings) { metadata.bindings = {}; } metadata.bindings[binding.id] = binding; }); // Any encodings? nsElement.selectAll('encoding').each(function (d) { let el = d3.select(this); let encoding = { id: el.attr('id'), bindingId: el.attr('for'), spec: JSON.parse(self.extractCDATA(el.text())) }; if (!metadata.encodings) { metadata.encodings = {}; } metadata.encodings[encoding.id] = encoding; }); return metadata; } extractCDATA (str) { return str.replace(/(<!\[CDATA\[)/g, '').replace(/]]>/g, ''); } getEmptyBinding (metadata, add) { let id = 1; /* eslint-disable no-unmodified-loop-condition */ while (metadata.bindings && metadata.bindings['Binding' + id]) { id++; } /* eslint-enable no-unmodified-loop-condition */ let newBinding = { id: 'Binding Set ' + id, svgRoot: ':root', dataRoot: metadata.datasets && Object.keys(metadata.datasets).length > 0 ? '$["' + Object.keys(metadata.datasets)[0] + '"]' : '', keyFunction: { expression: '(d, e) => d.key === e.index' } }; if (add) { if (!metadata.bindings) { metadata.bindings = {}; } metadata.bindings[newBinding.id] = newBinding; } return newBinding; } getEmptyEncoding (metadata, add) { let id = 1; /* eslint-disable no-unmodified-loop-condition */ while (metadata.encodings && metadata.encodings['Encoding' + id]) { id++; } /* eslint-enable no-unmodified-loop-condition */ let newEncoding = { id: 'Encoding' + id, bindingId: '', spec: {} }; if (add) { if (!metadata.encodings) { metadata.encodings = {}; } metadata.encodings[newEncoding.id] = newEncoding; } return newEncoding; } embedMetadata (dom, metadata) { let d3dom = d3.select(dom.rootElement); // Top: need a metadata tag let root = d3dom.selectAll('#mure').data([0]); root.exit().remove(); root = root.enter().append('metadata').attr('id', 'mure').merge(root); // Next down: a tag to define the namespace let nsElement = root.selectAll('mure').data([0]); nsElement.exit().remove(); nsElement = nsElement.enter().append('mure').attr('xmlns', this.NSString).merge(nsElement); // Okay, we're in our custom namespace... 
let's figure out the libraries let libraryList = metadata.libraries || []; let libraries = nsElement.selectAll('library').data(libraryList); libraries.exit().remove(); libraries = libraries.enter().append('library').merge(libraries); libraries.attr('src', d => d); // Let's deal with any user scripts let scriptList = metadata.scripts || []; let scripts = nsElement.selectAll('script').data(scriptList); scripts.exit().remove(); let scriptsEnter = scripts.enter().append('script'); scripts = scriptsEnter.merge(scripts); scripts.attr('id', d => d.id || null); scripts.each(function (d) { this.innerHTML = '<![CDATA[' + d.text + ']]>'; }); // Remove mureInteractivityRunner by default to ensure it always comes after the // metadata tag (of course, only bother adding it if we have any libraries or scripts) d3dom.select('#mureInteractivityRunner').remove(); if (libraryList.length > 0 || scriptList.length > 0) { d3dom.append('script') .attr('id', 'mureInteractivityRunner') .attr('type', 'text/javascript') .text('<![CDATA[' + mureInteractivityRunnerText + ']]'); } // We always store datasets as JSON let datasets = nsElement.selectAll('dataset').data(d3.entries(metadata.datasets || {})); datasets.exit().remove(); let datasetsEnter = datasets.enter().append('dataset'); datasets = datasetsEnter.merge(datasets); datasets.attr('name', d => d.key) .html(d => '<![CDATA[' + JSON.stringify(d.value) + ']]>'); // Store data bindings let bindings = nsElement.selectAll('binding').data(d3.values(metadata.bindings || {})); bindings.exit().remove(); let bindingsEnter = bindings.enter().append('binding'); bindings = bindingsEnter.merge(bindings); bindings .attr('id', d => d.id) .attr('dataroot', d => d.dataRoot) .attr('svgroot', d => d.svgRoot) .html(d => '<![CDATA[' + JSON.stringify(d.keyFunction) + ']]>'); // Store encoding metadata let encodings = nsElement.selectAll('encoding').data(d3.values(metadata.encodings || {})); encodings.exit().remove(); let encodingsEnter = encodings.enter().append('encoding'); encodings = encodingsEnter.merge(encodings); encodings .attr('id', d => d.id) .attr('bindingid', d => d.bindingId) .html(d => '<![CDATA[' + JSON.stringify(d.spec) + ']]>'); return dom; } async downloadSvg (filename) { let fileEntry = await this.getFile(filename, this.CONTENT_FORMATS.dom); if (!fileEntry) { throw new Error('Can\'t download non-existent file: ' + filename); } let dom = this.embedMetadata(fileEntry._attachments[fileEntry._id].dom, fileEntry.metadata); let xmlText = new window.XMLSerializer().serializeToString(dom) .replace(/&lt;!\[CDATA\[/g, '<!\[CDATA\[').replace(/]]&gt;/g, ']]>'); // create a fake link to initiate the download let a = document.createElement('a'); a.style = 'display:none'; let url = window.URL.createObjectURL(new window.Blob([xmlText], { type: 'image/svg+xml' })); a.href = url; a.download = filename; document.body.appendChild(a); a.click(); window.URL.revokeObjectURL(url); a.parentNode.removeChild(a); } matchDataPaths (path1, path2, metadata) { if (!metadata || !metadata.datasets || !path1 || !path2) { return false; } let result1 = jsonpath.query(metadata.datasets, path1); let result2 = jsonpath.query(metadata.datasets, path2); if (result1.length !== 1 || result2.length !== 1) { return false; } return result1[0] === result2[0]; } matchDomSelectors (selector1, selector2, dom) { if (!selector1 || !selector2) { return false; } let result1 = dom.querySelector(selector1); let result2 = dom.querySelector(selector2); return result1 === result2; } getMatches (metadata, dom) { let mapping = 
[]; if (metadata && metadata.bindings && metadata.datasets && dom) { d3.values(metadata.bindings).forEach(binding => { mapping.push(...this.getMatchesForBinding(binding, metadata, dom)); }); } return mapping; } getMatchesForBinding (binding, metadata, dom) { if (!binding.dataRoot || !binding.svgRoot || !binding.keyFunction) { return []; } if (binding.keyFunction.customMapping) { return binding.keyFunction.customMapping; } /* eslint-disable no-eval */ let expression = (0, eval)(binding.keyFunction.expression); /* eslint-enable no-eval */ // Need to evaluate the expression for each n^2 possible pairing, and assign // mapping the first time the expression is true (but not after! // mapping can only be one-to-one!) let dataRoot = jsonpath.query(metadata.datasets, binding.dataRoot)[0]; let dataEntries; if (dataRoot instanceof Array) { dataEntries = dataRoot.map((d, i) => { return { key: i, value: d }; }); } else if (typeof dataRoot === 'object') { dataEntries = d3.entries(dataRoot); } else { return; // a leaf was picked as a root... no mapping possible } let svgRoot = dom.querySelector(binding.svgRoot); let svgItems = Array.from(svgRoot.children); let mapping = { links: [], svgLookup: {}, dataLookup: {} }; dataEntries.forEach(dataEntry => { for (let itemIndex = 0; itemIndex < svgItems.length; itemIndex += 1) { if (mapping.svgLookup[itemIndex] !== undefined) { // this svg element has already been matched with a different dataEntry continue; } let svgEntry = { index: itemIndex, element: svgItems[itemIndex] }; let expressionResult = null; try { expressionResult = expression(dataEntry, svgEntry); } catch (errorObj) { // todo: add interface helpers for debugging the expression throw errorObj; } if (expressionResult === true) { mapping.svgLookup[svgEntry.index] = mapping.links.length; mapping.dataLookup[dataEntry.key] = mapping.links.length; mapping.links.push({ dataEntry, svgEntry }); break; } else if (expressionResult !== false) { throw new Error('The expression must evaluate to true or false'); } } }); return mapping; } inferAllEncodings (binding, metadata, dom) { let mapping = this.getMatchesForBinding(binding, metadata, dom); // Create / get cached distribution of values let dataDistributions = {}; let svgDistributions = {}; mapping.links.forEach(link => { Object.keys(link.dataEntry.value).forEach(attr => { let value = link.dataEntry.value[attr]; if (typeof value === 'string' || typeof value === 'number') { dataDistributions[attr] = dataDistributions[attr] || {}; dataDistributions[attr][value] = (dataDistributions[attr][value] || 0) + 1; } }); svgDistributions._tagName = svgDistributions._tagName || {}; svgDistributions._tagName[link.svgEntry.element.tagName] = (svgDistributions._tagName[link.svgEntry.element.tagName] || 0) + 1; Array.from(link.svgEntry.element.attributes).forEach(attrObj => { let attr = attrObj.name; let value = link.svgEntry.element.getAttribute(attr); if (typeof value === 'string' || typeof value === 'number') { svgDistributions[attr] = svgDistributions[attr] || {}; svgDistributions[attr][value] = (svgDistributions[attr][value] || 0) + 1; } }); }); let quantitativePool = {}; let categoricalPool = {}; } } let mure = new Mure(); export default mure;
Learn constant constraints (not really "encodings" yet)
src/mure.js
Learn constant constraints (not really "encodings" yet)
<ide><path>rc/mure.js <ide> <ide> this.SIGNALS = { <ide> cancelled: {} <add> }; <add> <add> this.ENCODING_TYPES = { <add> constant: 0 <ide> }; <ide> <ide> // The namespace string for our custom XML <ide> getEmptyBinding (metadata, add) { <ide> let id = 1; <ide> /* eslint-disable no-unmodified-loop-condition */ <del> while (metadata.bindings && metadata.bindings['Binding' + id]) { <add> while (metadata.bindings && metadata.bindings['Binding Set ' + id]) { <ide> id++; <ide> } <ide> /* eslint-enable no-unmodified-loop-condition */ <ide> getEmptyEncoding (metadata, add) { <ide> let id = 1; <ide> /* eslint-disable no-unmodified-loop-condition */ <del> while (metadata.encodings && metadata.encodings['Encoding' + id]) { <add> while (metadata.encodings && metadata.encodings['Encoding ' + id]) { <ide> id++; <ide> } <ide> /* eslint-enable no-unmodified-loop-condition */ <ide> let newEncoding = { <del> id: 'Encoding' + id, <add> id: 'Encoding ' + id, <ide> bindingId: '', <ide> spec: {} <ide> }; <ide> inferAllEncodings (binding, metadata, dom) { <ide> let mapping = this.getMatchesForBinding(binding, metadata, dom); <ide> <add> // Trash all previous encodings associated with this binding <add> if (metadata.encodings) { <add> Object.keys(metadata.encodings).forEach(encodingId => { <add> if (metadata.encodings[encodingId].bindingId === binding.id) { <add> delete metadata.encodings[encodingId]; <add> } <add> }); <add> } else { <add> metadata.encodings = {}; <add> } <add> <ide> // Create / get cached distribution of values <ide> let dataDistributions = {}; <ide> let svgDistributions = {}; <ide> }); <ide> }); <ide> <del> let quantitativePool = {}; <del> let categoricalPool = {}; <add> // Generate all potential svg constant rules <add> // TODO: infer data constants as well if we ever get around to <add> // supporting the data cleaning use case <add> Object.keys(svgDistributions).forEach(attr => { <add> let encoding = this.getEmptyEncoding(metadata, true); <add> encoding.bindingId = binding.id; <add> encoding.spec.type = this.ENCODING_TYPES.constant; <add> encoding.spec.attribute = attr; <add> <add> // Figure out the bin with the highest count, while calculating the error <add> let value = null; <add> let maxBinCount = 0; <add> let totalCount = 0; <add> Object.keys(svgDistributions[attr]).forEach(binLabel => { <add> let binCount = svgDistributions[attr][binLabel]; <add> totalCount += binCount; <add> if (binCount > maxBinCount) { <add> value = binLabel; <add> maxBinCount = binCount; <add> } <add> }); <add> encoding.spec.value = value; <add> encoding.spec.error = (totalCount - maxBinCount) / totalCount; <add> <add> // Don't initially enable constants unless they're 100% accurate <add> encoding.spec.enabled = encoding.spec.error === 0; <add> }); <add> <add> // TODO: generate linear, log, other model rules <add> this.saveFile({ metadata }); <ide> } <ide> } <ide>
Java
apache-2.0
ea07ff5ce778a78249039d9f84c1dbab4856ebc4
0
googleinterns/step188-2020,googleinterns/step188-2020,googleinterns/step188-2020
package com.google.sps.utilities; import com.google.cloud.Date; import com.google.sps.data.Event; import com.google.sps.data.User; import com.google.sps.data.OpportunitySignup; import com.google.sps.data.User; import com.google.sps.data.VolunteeringOpportunity; import java.util.Arrays; import java.util.Collections; import java.util.HashSet; import java.util.Set; import java.util.UUID; /* Class containing utilities for testing. */ public class TestUtils { private static final String NAME = "Bob Smith"; private static final String EMAIL = "[email protected]"; private static final String EVENT_NAME = "Team Meeting"; private static final String DESCRIPTION = "Daily Team Sync"; private static final Set<String> LABELS = Collections.unmodifiableSet(new HashSet<>(Arrays.asList("Tech", "Work"))); private static final String LOCATION = "Remote"; private static final Date DATE = Date.fromYearMonthDay(2016, 9, 15); private static final String DATE_STRING = "09/15/2016"; private static final String TIME = "3:00PM-5:00PM"; private static final User HOST = new User.Builder(NAME, EMAIL).build(); private static final String EVENT_ID = "0883de79-17d7-49a3-a866-dbd5135062a8"; private static final String OPPORTUNITY_NAME = "Performer"; private static final int NUMBER_OF_SPOTS = 240; private static final String VOLUNTEER_EMAIL = "[email protected]"; private static final Set<String> INTERESTS = Collections.unmodifiableSet(new HashSet<>(Arrays.asList("Conservation", "Food"))); private static final Set<String> SKILLS = Collections.unmodifiableSet(new HashSet<>(Arrays.asList("Cooking"))); /* * Returns a new Event object with the given user as host. * @param user host to be used to create a Event object * @return an event with given user as host */ public static Event newEventWithHost(User user) { return new Event.Builder(EVENT_NAME, DESCRIPTION, LABELS, LOCATION, DATE, TIME, user).build(); } /** Returns a new VolunteeringOpportunity object with arbitrary parameters. */ public static VolunteeringOpportunity newVolunteeringOpportunity() { return new VolunteeringOpportunity.Builder(EVENT_ID, OPPORTUNITY_NAME, NUMBER_OF_SPOTS).build(); } /* * Returns a new VolunteeringOpportunity object with the given eventId. * @param eventId event ID used to create a VolunteeringOpportunity object * @return a volunteering opportunity with the given event ID */ public static VolunteeringOpportunity newVolunteeringOpportunityWithEventId(String eventId) { return new VolunteeringOpportunity.Builder(eventId, OPPORTUNITY_NAME, NUMBER_OF_SPOTS).build(); } /* * Returns a new OpportunitySignup object with the given opportunityId. * @param opportunityId opportunity ID to create an OpportunitySignup object * @return an opportunity signup with given opportunity ID */ public static OpportunitySignup newOpportunitySignupWithOpportunityId(String opportunityId) { return new OpportunitySignup.Builder(opportunityId, VOLUNTEER_EMAIL).build(); } /** Returns a new User object with arbitrary attributes. */ public static User newUser() { return new User.Builder(NAME, EMAIL).setInterests(INTERESTS).setSkills(SKILLS).build(); } /** Returns a new User object with arbitrary attributes and given email. */ public static User newUserWithEmail(String email) { return new User.Builder(NAME, email).setInterests(INTERESTS).setSkills(SKILLS).build(); } /** Returns a random ID. */ public static String newRandomId() { return UUID.randomUUID().toString(); } }
project/src/main/java/com/google/sps/utilities/TestUtils.java
package com.google.sps.utilities; import com.google.cloud.Date; import com.google.sps.data.Event; import com.google.sps.data.User; import com.google.sps.data.OpportunitySignup; import com.google.sps.data.User; import com.google.sps.data.VolunteeringOpportunity; import java.util.Arrays; import java.util.Collections; import java.util.HashSet; import java.util.Set; import java.util.UUID; /* Class containing utilities for testing. */ public class TestUtils { private static final String NAME = "Bob Smith"; private static final String EMAIL = "[email protected]"; private static final String EVENT_NAME = "Team Meeting"; private static final String DESCRIPTION = "Daily Team Sync"; private static final Set<String> LABELS = Collections.unmodifiableSet(new HashSet<>(Arrays.asList("Tech", "Work"))); private static final String LOCATION = "Remote"; private static final Date DATE = Date.fromYearMonthDay(2016, 9, 15); private static final String DATE_STRING = "09/15/2016"; private static final String TIME = "3:00PM-5:00PM"; private static final User HOST = new User.Builder(NAME, EMAIL).build(); private static final String EVENT_ID = "0883de79-17d7-49a3-a866-dbd5135062a8"; private static final String OPPORTUNITY_NAME = "Performer"; private static final int NUMBER_OF_SPOTS = 240; private static final String VOLUNTEER_EMAIL = "[email protected]"; private static final Set<String> INTERESTS = Collections.unmodifiableSet(new HashSet<>(Arrays.asList("Conservation", "Food"))); private static final Set<String> SKILLS = Collections.unmodifiableSet(new HashSet<>(Arrays.asList("Cooking"))); /* * Returns a new Event object with the given user as host. * @param user host to be used to create a Event object * @return an event with given user as host */ public static Event newEventWithHost(User user) { return new Event.Builder(EVENT_NAME, DESCRIPTION, LABELS, LOCATION, DATE, TIME, user).build(); } /** Returns a new VolunteeringOpportunity object with arbitrary parameters. */ public static VolunteeringOpportunity newVolunteeringOpportunity() { return new VolunteeringOpportunity.Builder(EVENT_ID, OPPORTUNITY_NAME, NUMBER_OF_SPOTS).build(); } /* * Returns a new VolunteeringOpportunity object with the given eventId. * @param eventId event ID used to create a VolunteeringOpportunity object * @return a volunteering opportunity with the given event ID */ public static VolunteeringOpportunity newVolunteeringOpportunityWithEventId(String eventId) { return new VolunteeringOpportunity.Builder(eventId, OPPORTUNITY_NAME, NUMBER_OF_SPOTS).build(); } /* * Returns a new OpportunitySignup object with the given opportunityId. * @param opportunityId opportunity ID to create an OpportunitySignup object * @return an opportunity signup with given opportunity ID */ public static OpportunitySignup newOpportunitySignupWithOpportunityId(String opportunityId) { return new OpportunitySignup.Builder(opportunityId, VOLUNTEER_EMAIL).build(); } /** Returns a new User object with arbitrary attributes. */ public static User newUser(String email) { return new User.Builder(USER_NAME, email).setInterests(INTERESTS).setSkills(SKILLS).build(); } /** Returns a random ID. */ public static String newRandomId() { return UUID.randomUUID().toString(); } }
Fix newUser definitions
project/src/main/java/com/google/sps/utilities/TestUtils.java
Fix newUser definitions
<ide><path>roject/src/main/java/com/google/sps/utilities/TestUtils.java <ide> } <ide> <ide> /** Returns a new User object with arbitrary attributes. */ <del> public static User newUser(String email) { <del> return new User.Builder(USER_NAME, email).setInterests(INTERESTS).setSkills(SKILLS).build(); <add> public static User newUser() { <add> return new User.Builder(NAME, EMAIL).setInterests(INTERESTS).setSkills(SKILLS).build(); <add> } <add> <add> /** Returns a new User object with arbitrary attributes and given email. */ <add> public static User newUserWithEmail(String email) { <add> return new User.Builder(NAME, email).setInterests(INTERESTS).setSkills(SKILLS).build(); <ide> } <ide> <ide> /** Returns a random ID. */
JavaScript
mit
71bab2f615eb21ca04e973d62a7046fb836c6722
0
jccazeaux/rivets,re-clone/rivets,zongkelong/rivets,GerHobbelt/rivets,zongkelong/rivets,altmind/rivets,mikeric/rivets,GerHobbelt/rivets,benderTheCrime/tiny-rivets,jccazeaux/rivets,moneyadviceservice/rivets,QAPInt/rivets,MishaMykhalyuk/rivets,GerHobbelt/rivets,altmind/rivets,kangax/rivets,QAPInt/rivets,MishaMykhalyuk/rivets,re-clone/rivets,mikeric/rivets,nopnop/rivets,zongkelong/rivets,nopnop/rivets,npmcomponent/mikeric-rivets,jccazeaux/rivets,mikeric/rivets,re-clone/rivets,nopnop/rivets,altmind/rivets,QAPInt/rivets,MishaMykhalyuk/rivets
describe('Rivets.Binding', function() { var model, el, view, binding; beforeEach(function() { rivets.configure({ adapter: { subscribe: function() {}, unsubscribe: function() {}, read: function() {}, publish: function() {} } }); el = document.createElement('div'); el.setAttribute('data-text', 'obj.name'); view = rivets.bind(el, {obj: {}}); binding = view.bindings[0]; model = binding.model; }); it('gets assigned the routine function matching the identifier', function() { expect(binding.routine).toBe(rivets.routines.text); }); describe('bind()', function() { it('subscribes to the model for changes via the adapter', function() { spyOn(rivets.config.adapter, 'subscribe'); binding.bind(); expect(rivets.config.adapter.subscribe).toHaveBeenCalledWith(model, 'name', binding.set); }); describe('with preloadData set to true', function() { beforeEach(function() { rivets.config.preloadData = true; }); it('sets the initial value via the adapter', function() { spyOn(binding, 'set'); spyOn(rivets.config.adapter, 'read'); binding.bind(); expect(rivets.config.adapter.read).toHaveBeenCalledWith(model, 'name'); expect(binding.set).toHaveBeenCalled(); }); }); describe('with the bypass option set to true', function() { beforeEach(function() { binding.options.bypass = true; }); it('sets the initial value from the model directly', function() { spyOn(binding, 'set'); binding.model.name = 'espresso'; binding.bind(); expect(binding.set).toHaveBeenCalledWith('espresso'); }); }); describe('with dependencies', function() { beforeEach(function() { binding.options.dependencies = ['fname', 'lname']; }); it('sets up observers on the dependant attributes', function() { spyOn(rivets.config.adapter, 'subscribe'); binding.bind(); expect(rivets.config.adapter.subscribe).toHaveBeenCalledWith(model, 'fname', binding.dependencyCallbacks['fname']); expect(rivets.config.adapter.subscribe).toHaveBeenCalledWith(model, 'lname', binding.dependencyCallbacks['lname']); }); }); }); describe('set()', function() { it('performs the binding routine with the supplied value', function() { spyOn(binding, 'routine'); binding.set('sweater'); expect(binding.routine).toHaveBeenCalledWith(el, 'sweater'); }); it('applies any formatters to the value before performing the routine', function() { rivets.config.formatters = { awesome: function(value) { return 'awesome ' + value } }; binding.formatters.push('awesome'); spyOn(binding, 'routine'); binding.set('sweater'); expect(binding.routine).toHaveBeenCalledWith(el, 'awesome sweater'); }); describe('on an event binding', function() { beforeEach(function() { binding.options.special = 'event'; }); it('performs the binding routine with the supplied function and current listener', function() { spyOn(binding, 'routine'); func = function() { return 1 + 2; } binding.set(func); expect(binding.routine).toHaveBeenCalledWith(el, func, undefined); }); it('passes the previously set funcation as the current listener on subsequent calls', function() { spyOn(binding, 'routine'); funca = function() { return 1 + 2; }; funcb = function() { return 2 + 5; }; binding.set(funca); expect(binding.routine).toHaveBeenCalledWith(el, funca, undefined); binding.set(funcb); expect(binding.routine).toHaveBeenCalledWith(el, funcb, funca); }); }); }); describe('publish()', function() { it("should publish the value of a number input", function() { numberInput = document.createElement('input'); numberInput.setAttribute('type', 'number'); numberInput.setAttribute('data-value', 'obj.num'); view = rivets.bind(numberInput, {obj: {num: 42}}); 
binding = view.bindings[0]; model = binding.model; numberInput.value = 42; spyOn(rivets.config.adapter, 'publish'); binding.publish({target: numberInput}); expect(rivets.config.adapter.publish).toHaveBeenCalledWith(model, 'num', '42'); }); }); describe('formattedValue()', function() { it('applies the current formatters on the supplied value', function() { rivets.config.formatters = { awesome: function(value) { return 'awesome ' + value } }; binding.formatters.push('awesome'); expect(binding.formattedValue('hat')).toBe('awesome hat'); }); it('uses formatters on the model', function() { model.modelAwesome = function(value) { return 'model awesome ' + value }; binding.formatters.push('modelAwesome'); expect(binding.formattedValue('hat')).toBe('model awesome hat'); }); describe('with a multi-argument formatter string', function() { beforeEach(function() { rivets.config.formatters = { awesome: function(value, prefix) { return prefix + ' awesome ' + value; } }; binding.formatters.push('awesome super'); }); it('applies the formatter with arguments', function() { expect(binding.formattedValue('jacket')).toBe('super awesome jacket'); }); }); }); });
spec/rivets/binding.js
describe('Rivets.Binding', function() { var model, el, view, binding; beforeEach(function() { rivets.configure({ adapter: { subscribe: function() {}, unsubscribe: function() {}, read: function() {}, publish: function() {} } }); el = document.createElement('div'); el.setAttribute('data-text', 'obj.name'); view = rivets.bind(el, {obj: {}}); binding = view.bindings[0]; model = binding.model; }); it('gets assigned the routine function matching the identifier', function() { expect(binding.routine).toBe(rivets.routines.text); }); describe('bind()', function() { it('subscribes to the model for changes via the adapter', function() { spyOn(rivets.config.adapter, 'subscribe'); binding.bind(); expect(rivets.config.adapter.subscribe).toHaveBeenCalled(); }); describe('with preloadData set to true', function() { beforeEach(function() { rivets.config.preloadData = true; }); it('sets the initial value via the adapter', function() { spyOn(binding, 'set'); spyOn(rivets.config.adapter, 'read'); binding.bind(); expect(binding.set).toHaveBeenCalled(); expect(rivets.config.adapter.read).toHaveBeenCalled(); }); }); describe('with the bypass option set to true', function() { beforeEach(function() { binding.options.bypass = true; }); it('sets the initial value from the model directly', function() { spyOn(binding, 'set'); binding.model.name = 'espresso'; binding.bind(); expect(binding.set).toHaveBeenCalledWith('espresso'); }); }); }); describe('set()', function() { it('performs the binding routine with the supplied value', function() { spyOn(binding, 'routine'); binding.set('sweater'); expect(binding.routine).toHaveBeenCalledWith(el, 'sweater'); }); it('applies any formatters to the value before performing the routine', function() { rivets.config.formatters = { awesome: function(value) { return 'awesome ' + value } }; binding.formatters.push('awesome'); spyOn(binding, 'routine'); binding.set('sweater'); expect(binding.routine).toHaveBeenCalledWith(el, 'awesome sweater'); }); describe('on an event binding', function() { beforeEach(function() { binding.options.special = 'event'; }); it('performs the binding routine with the supplied function and current listener', function() { spyOn(binding, 'routine'); func = function() { return 1 + 2; } binding.set(func); expect(binding.routine).toHaveBeenCalledWith(el, func, undefined); }); it('passes the previously set funcation as the current listener on subsequent calls', function() { spyOn(binding, 'routine'); funca = function() { return 1 + 2; }; funcb = function() { return 2 + 5; }; binding.set(funca); expect(binding.routine).toHaveBeenCalledWith(el, funca, undefined); binding.set(funcb); expect(binding.routine).toHaveBeenCalledWith(el, funcb, funca); }); }); }); describe('publish()', function() { it("should publish the value of a number input", function() { numberInput = document.createElement('input'); numberInput.setAttribute('type', 'number'); numberInput.setAttribute('data-value', 'obj.num'); view = rivets.bind(numberInput, {obj: {num: 42}}); binding = view.bindings[0]; model = binding.model; numberInput.value = 42; spyOn(rivets.config.adapter, 'publish'); binding.publish({target: numberInput}); expect(rivets.config.adapter.publish).toHaveBeenCalledWith(model, 'num', '42'); }); }); describe('formattedValue()', function() { it('applies the current formatters on the supplied value', function() { rivets.config.formatters = { awesome: function(value) { return 'awesome ' + value } }; binding.formatters.push('awesome'); expect(binding.formattedValue('hat')).toBe('awesome 
hat'); }); it('uses formatters on the model', function() { model.modelAwesome = function(value) { return 'model awesome ' + value }; binding.formatters.push('modelAwesome'); expect(binding.formattedValue('hat')).toBe('model awesome hat'); }); describe('with a multi-argument formatter string', function() { beforeEach(function() { rivets.config.formatters = { awesome: function(value, prefix) { return prefix + ' awesome ' + value; } }; binding.formatters.push('awesome super'); }); it('applies the formatter with arguments', function() { expect(binding.formattedValue('jacket')).toBe('super awesome jacket'); }); }); }); });
Add initial spec for binding with dependent attributes.
spec/rivets/binding.js
Add initial spec for binding with dependent attributes.
<ide><path>pec/rivets/binding.js <ide> it('subscribes to the model for changes via the adapter', function() { <ide> spyOn(rivets.config.adapter, 'subscribe'); <ide> binding.bind(); <del> expect(rivets.config.adapter.subscribe).toHaveBeenCalled(); <add> expect(rivets.config.adapter.subscribe).toHaveBeenCalledWith(model, 'name', binding.set); <ide> }); <ide> <ide> describe('with preloadData set to true', function() { <ide> spyOn(binding, 'set'); <ide> spyOn(rivets.config.adapter, 'read'); <ide> binding.bind(); <add> expect(rivets.config.adapter.read).toHaveBeenCalledWith(model, 'name'); <ide> expect(binding.set).toHaveBeenCalled(); <del> expect(rivets.config.adapter.read).toHaveBeenCalled(); <ide> }); <ide> }); <ide> <ide> binding.model.name = 'espresso'; <ide> binding.bind(); <ide> expect(binding.set).toHaveBeenCalledWith('espresso'); <add> }); <add> }); <add> <add> describe('with dependencies', function() { <add> beforeEach(function() { <add> binding.options.dependencies = ['fname', 'lname']; <add> }); <add> <add> it('sets up observers on the dependant attributes', function() { <add> spyOn(rivets.config.adapter, 'subscribe'); <add> binding.bind(); <add> expect(rivets.config.adapter.subscribe).toHaveBeenCalledWith(model, 'fname', binding.dependencyCallbacks['fname']); <add> expect(rivets.config.adapter.subscribe).toHaveBeenCalledWith(model, 'lname', binding.dependencyCallbacks['lname']); <ide> }); <ide> }); <ide> });
Java
bsd-3-clause
261b0f4e9baa82ebc873eee9c149ff810e853c7d
0
NCIP/rembrandt,NCIP/rembrandt,NCIP/rembrandt,NCIP/rembrandt,NCIP/rembrandt,NCIP/rembrandt
package gov.nih.nci.rembrandt.web.struts.action; import gov.nih.nci.caintegrator.application.lists.UserList; import gov.nih.nci.caintegrator.application.util.ClassHelper; import gov.nih.nci.caintegrator.dto.de.ArrayPlatformDE; import gov.nih.nci.caintegrator.dto.de.MultiGroupComparisonAdjustmentTypeDE; import gov.nih.nci.caintegrator.dto.de.StatisticTypeDE; import gov.nih.nci.caintegrator.dto.de.StatisticalSignificanceDE; import gov.nih.nci.caintegrator.dto.de.ExprFoldChangeDE.UpRegulation; import gov.nih.nci.caintegrator.dto.query.ClassComparisonQueryDTO; import gov.nih.nci.caintegrator.dto.query.ClinicalQueryDTO; import gov.nih.nci.caintegrator.dto.query.QueryType; import gov.nih.nci.caintegrator.enumeration.MultiGroupComparisonAdjustmentType; import gov.nih.nci.caintegrator.enumeration.Operator; import gov.nih.nci.caintegrator.enumeration.StatisticalMethodType; import gov.nih.nci.caintegrator.enumeration.StatisticalSignificanceType; import gov.nih.nci.caintegrator.exceptions.FrameworkException; import gov.nih.nci.caintegrator.security.UserCredentials; import gov.nih.nci.caintegrator.service.findings.Finding; import gov.nih.nci.rembrandt.dto.query.PatientUserListQueryDTO; import gov.nih.nci.rembrandt.cache.RembrandtPresentationTierCache; import gov.nih.nci.rembrandt.dto.query.ClinicalDataQuery; import gov.nih.nci.rembrandt.service.findings.RembrandtFindingsFactory; import gov.nih.nci.rembrandt.util.RembrandtConstants; import gov.nih.nci.rembrandt.web.bean.SessionQueryBag; import gov.nih.nci.rembrandt.web.factory.ApplicationFactory; import gov.nih.nci.rembrandt.web.helper.GroupRetriever; import gov.nih.nci.rembrandt.web.helper.EnumCaseChecker; import gov.nih.nci.rembrandt.web.helper.SampleBasedQueriesRetriever; import gov.nih.nci.rembrandt.web.struts.form.ClassComparisonForm; import java.util.ArrayList; import java.util.List; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.servlet.http.HttpSession; import org.apache.log4j.Logger; import org.apache.struts.action.ActionForm; import org.apache.struts.action.ActionForward; import org.apache.struts.action.ActionMapping; import org.apache.struts.actions.DispatchAction; /** * caIntegrator License * * Copyright 2001-2005 Science Applications International Corporation ("SAIC"). * The software subject to this notice and license includes both human readable source code form and machine readable, * binary, object code form ("the caIntegrator Software"). The caIntegrator Software was developed in conjunction with * the National Cancer Institute ("NCI") by NCI employees and employees of SAIC. * To the extent government employees are authors, any rights in such works shall be subject to Title 17 of the United States * Code, section 105. * This caIntegrator Software License (the "License") is between NCI and You. "You (or "Your") shall mean a person or an * entity, and all other entities that control, are controlled by, or are under common control with the entity. "Control" * for purposes of this definition means (i) the direct or indirect power to cause the direction or management of such entity, * whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) * beneficial ownership of such entity. * This License is granted provided that You agree to the conditions described below. 
NCI grants You a non-exclusive, * worldwide, perpetual, fully-paid-up, no-charge, irrevocable, transferable and royalty-free right and license in its rights * in the caIntegrator Software to (i) use, install, access, operate, execute, copy, modify, translate, market, publicly * display, publicly perform, and prepare derivative works of the caIntegrator Software; (ii) distribute and have distributed * to and by third parties the caIntegrator Software and any modifications and derivative works thereof; * and (iii) sublicense the foregoing rights set out in (i) and (ii) to third parties, including the right to license such * rights to further third parties. For sake of clarity, and not by way of limitation, NCI shall have no right of accounting * or right of payment from You or Your sublicensees for the rights granted under this License. This License is granted at no * charge to You. * 1. Your redistributions of the source code for the Software must retain the above copyright notice, this list of conditions * and the disclaimer and limitation of liability of Article 6, below. Your redistributions in object code form must reproduce * the above copyright notice, this list of conditions and the disclaimer of Article 6 in the documentation and/or other materials * provided with the distribution, if any. * 2. Your end-user documentation included with the redistribution, if any, must include the following acknowledgment: "This * product includes software developed by SAIC and the National Cancer Institute." If You do not include such end-user * documentation, You shall include this acknowledgment in the Software itself, wherever such third-party acknowledgments * normally appear. * 3. You may not use the names "The National Cancer Institute", "NCI" "Science Applications International Corporation" and * "SAIC" to endorse or promote products derived from this Software. This License does not authorize You to use any * trademarks, service marks, trade names, logos or product names of either NCI or SAIC, except as required to comply with * the terms of this License. * 4. For sake of clarity, and not by way of limitation, You may incorporate this Software into Your proprietary programs and * into any third party proprietary programs. However, if You incorporate the Software into third party proprietary * programs, You agree that You are solely responsible for obtaining any permission from such third parties required to * incorporate the Software into such third party proprietary programs and for informing Your sublicensees, including * without limitation Your end-users, of their obligation to secure any required permissions from such third parties * before incorporating the Software into such third party proprietary software programs. In the event that You fail * to obtain such permissions, You agree to indemnify NCI for any claims against NCI by such third parties, except to * the extent prohibited by law, resulting from Your failure to obtain such permissions. * 5. For sake of clarity, and not by way of limitation, You may add Your own copyright statement to Your modifications and * to the derivative works, and You may provide additional or different license terms and conditions in Your sublicenses * of modifications of the Software, or any derivative works of the Software as a whole, provided Your use, reproduction, * and distribution of the Work otherwise complies with the conditions stated in this License. * 6. 
THIS SOFTWARE IS PROVIDED "AS IS," AND ANY EXPRESSED OR IMPLIED WARRANTIES, (INCLUDING, BUT NOT LIMITED TO, * THE IMPLIED WARRANTIES OF MERCHANTABILITY, NON-INFRINGEMENT AND FITNESS FOR A PARTICULAR PURPOSE) ARE DISCLAIMED. * IN NO EVENT SHALL THE NATIONAL CANCER INSTITUTE, SAIC, OR THEIR AFFILIATES BE LIABLE FOR ANY DIRECT, INDIRECT, * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE * GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. * */ public class ClassComparisonAction extends DispatchAction { private UserCredentials credentials; private static Logger logger = Logger.getLogger(ClassComparisonAction.class); private RembrandtPresentationTierCache presentationTierCache = ApplicationFactory.getPresentationTierCache(); /*** * These are the default error values used when an invalid enum type * parameter has been passed to the action. These default values should * be verified as useful in all cases. */ private MultiGroupComparisonAdjustmentType ERROR_MULTI_GROUP_COMPARE_ADJUSTMENT_TYPE = MultiGroupComparisonAdjustmentType.FWER; private StatisticalMethodType ERROR_STATISTICAL_METHOD_TYPE = StatisticalMethodType.TTest; /** * Method submittal * * @param ActionMapping * mapping * @param ActionForm * form * @param HttpServletRequest * request * @param HttpServletResponse * response * @return ActionForward * @throws Exception */ public ActionForward submit(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception { ClassComparisonForm classComparisonForm = (ClassComparisonForm) form; String sessionId = request.getSession().getId(); ClassComparisonQueryDTO classComparisonQueryDTO = createClassComparisonQueryDTO(classComparisonForm,request.getSession()); /*Create the InstituteDEs using credentials from the local session. * May want to put these in the cache eventually. 
*/ if(request.getSession().getAttribute(RembrandtConstants.USER_CREDENTIALS)!=null){ credentials = (UserCredentials) request.getSession().getAttribute(RembrandtConstants.USER_CREDENTIALS); classComparisonQueryDTO.setInstitutionDEs(credentials.getInstitutes()); } if (classComparisonQueryDTO!=null) { SessionQueryBag queryBag = presentationTierCache.getSessionQueryBag(sessionId); queryBag.putQueryDTO(classComparisonQueryDTO, classComparisonForm); presentationTierCache.putSessionQueryBag(sessionId, queryBag); } RembrandtFindingsFactory factory = new RembrandtFindingsFactory(); Finding finding = null; try { finding = factory.createClassComparisonFinding(classComparisonQueryDTO,sessionId,classComparisonQueryDTO.getQueryName()); } catch (FrameworkException e) { e.printStackTrace(); } return mapping.findForward("viewResults"); } public ActionForward setup(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception { ClassComparisonForm classComparisonForm = (ClassComparisonForm) form; /*setup the defined Disease query names and the list of samples selected from a Resultset*/ GroupRetriever groupRetriever = new GroupRetriever(); classComparisonForm.setExistingGroupsList(groupRetriever.getClinicalGroupsCollection(request.getSession())); return mapping.findForward("backToClassComparison"); } private ClassComparisonQueryDTO createClassComparisonQueryDTO(ClassComparisonForm classComparisonQueryForm, HttpSession session){ ClassComparisonQueryDTO classComparisonQueryDTO = (ClassComparisonQueryDTO)ApplicationFactory.newQueryDTO(QueryType.CLASS_COMPARISON_QUERY); classComparisonQueryDTO.setQueryName(classComparisonQueryForm.getAnalysisResultName()); //Create the clinical query DTO collection from the selected groups in the form List<ClinicalQueryDTO> clinicalQueryCollection = new ArrayList<ClinicalQueryDTO>(); if(classComparisonQueryForm.getSelectedGroups() != null && classComparisonQueryForm.getSelectedGroups().length == 2 ){ for(int i=0; i<classComparisonQueryForm.getSelectedGroups().length; i++){ // //lets ensure the that the baseline is added last // if(!classComparisonQueryForm.getSelectedGroups()[i].equals(classComparisonQueryForm.getBaselineGroup())) { // clinicalDataQuery = sampleBasedQueriesRetriever.getQuery(sessionId, classComparisonQueryForm.getSelectedGroups()[i]); // //add logic to if there is no predefined query.. 
use the given samples from the user // // //bag and construct a clinical query to add into the collection // clinicalQueryCollection.add(clinicalDataQuery); // } // } // //now process the baseline // clinicalDataQuery = sampleBasedQueriesRetriever.getQuery(sessionId, classComparisonQueryForm.getBaselineGroup()); // clinicalQueryCollection.add(clinicalDataQuery); /* * parse the selected groups, create the appropriate EnumType and add it * to its respective EnumSet */ String[] uiDropdownString = classComparisonQueryForm.getSelectedGroups()[i].split("#"); String myClassName = uiDropdownString[0]; String myValueName = uiDropdownString[1]; Class myClass = ClassHelper.createClass(myClassName); if(myClass.isInstance(new UserList())){ PatientUserListQueryDTO patientQueryDTO = new PatientUserListQueryDTO(session,myValueName); clinicalQueryCollection.add(patientQueryDTO); if(i==1){//the second group is always baseline //to set baseline only when the statistical method //is not FTest if(!"FTest".equals(classComparisonQueryForm.getStatisticalMethod())) patientQueryDTO.setBaseline(true); } } } classComparisonQueryDTO.setComparisonGroups(clinicalQueryCollection); } //Create the foldChange DEs if (classComparisonQueryForm.getFoldChange().equals("list")){ UpRegulation exprFoldChangeDE = new UpRegulation(new Float(classComparisonQueryForm.getFoldChangeAuto())); classComparisonQueryDTO.setExprFoldChangeDE(exprFoldChangeDE); } if (classComparisonQueryForm.getFoldChange().equals("specify")){ UpRegulation exprFoldChangeDE = new UpRegulation(new Float(classComparisonQueryForm.getFoldChangeManual())); classComparisonQueryDTO.setExprFoldChangeDE(exprFoldChangeDE); } //Create arrayPlatfrom DEs if(classComparisonQueryForm.getArrayPlatform() != "" || classComparisonQueryForm.getArrayPlatform().length() != 0){ ArrayPlatformDE arrayPlatformDE = new ArrayPlatformDE(classComparisonQueryForm.getArrayPlatform()); classComparisonQueryDTO.setArrayPlatformDE(arrayPlatformDE); } //Create class comparison DEs /* * This following code is here to deal with an observed problem with the changing * of case in request parameters. See the class EnumCaseChecker for * enlightenment. 
*/ MultiGroupComparisonAdjustmentType mgAdjustmentType; String multiGroupComparisonAdjustmentTypeString= EnumCaseChecker.getEnumTypeName(classComparisonQueryForm.getComparisonAdjustment(),MultiGroupComparisonAdjustmentType.values()); if(multiGroupComparisonAdjustmentTypeString!=null) { mgAdjustmentType = MultiGroupComparisonAdjustmentType.valueOf(multiGroupComparisonAdjustmentTypeString); }else { logger.error("Invalid MultiGroupComparisonAdjustmentType value given in request"); logger.error("Selected MultiGroupComparisonAdjustmentType value = "+classComparisonQueryForm.getComparisonAdjustment()); logger.error("Using the default MultiGroupComparisonAdjustmentType value = "+ERROR_MULTI_GROUP_COMPARE_ADJUSTMENT_TYPE); mgAdjustmentType = ERROR_MULTI_GROUP_COMPARE_ADJUSTMENT_TYPE; } MultiGroupComparisonAdjustmentTypeDE multiGroupComparisonAdjustmentTypeDE = new MultiGroupComparisonAdjustmentTypeDE(mgAdjustmentType); ; if(!classComparisonQueryForm.getComparisonAdjustment().equalsIgnoreCase("NONE")){ StatisticalSignificanceDE statisticalSignificanceDE = new StatisticalSignificanceDE(classComparisonQueryForm.getStatisticalSignificance(),Operator.LE,StatisticalSignificanceType.adjustedpValue); classComparisonQueryDTO.setMultiGroupComparisonAdjustmentTypeDE(multiGroupComparisonAdjustmentTypeDE); classComparisonQueryDTO.setStatisticalSignificanceDE(statisticalSignificanceDE); } else{ StatisticalSignificanceDE statisticalSignificanceDE = new StatisticalSignificanceDE(classComparisonQueryForm.getStatisticalSignificance(),Operator.LE,StatisticalSignificanceType.pValue); classComparisonQueryDTO.setMultiGroupComparisonAdjustmentTypeDE(multiGroupComparisonAdjustmentTypeDE); classComparisonQueryDTO.setStatisticalSignificanceDE(statisticalSignificanceDE); } if(classComparisonQueryForm.getStatisticalMethod() != "" || classComparisonQueryForm.getStatisticalMethod().length() != 0){ /* * This following code is here to deal with an observed problem with the changing * of case in request parameters. See the class EnumCaseChecker for * enlightenment. */ StatisticalMethodType statisticalMethodType; String statisticalMethodTypeString= EnumCaseChecker.getEnumTypeName(classComparisonQueryForm.getStatisticalMethod(),StatisticalMethodType.values()); if(statisticalMethodTypeString!=null) { statisticalMethodType = StatisticalMethodType.valueOf(statisticalMethodTypeString); }else { logger.error("Invalid StatisticalMethodType value given in request"); logger.error("Selected StatisticalMethodType value = "+classComparisonQueryForm.getStatisticalMethod()); logger.error("Using the default StatisticalMethodType type of :"+ERROR_STATISTICAL_METHOD_TYPE); statisticalMethodType = ERROR_STATISTICAL_METHOD_TYPE; } StatisticTypeDE statisticTypeDE = new StatisticTypeDE(statisticalMethodType); classComparisonQueryDTO.setStatisticTypeDE(statisticTypeDE); } return classComparisonQueryDTO; } }
src/gov/nih/nci/rembrandt/web/struts/action/ClassComparisonAction.java
package gov.nih.nci.rembrandt.web.struts.action; import gov.nih.nci.caintegrator.application.lists.UserList; import gov.nih.nci.caintegrator.application.util.ClassHelper; import gov.nih.nci.caintegrator.dto.de.ArrayPlatformDE; import gov.nih.nci.caintegrator.dto.de.MultiGroupComparisonAdjustmentTypeDE; import gov.nih.nci.caintegrator.dto.de.StatisticTypeDE; import gov.nih.nci.caintegrator.dto.de.StatisticalSignificanceDE; import gov.nih.nci.caintegrator.dto.de.ExprFoldChangeDE.UpRegulation; import gov.nih.nci.caintegrator.dto.query.ClassComparisonQueryDTO; import gov.nih.nci.caintegrator.dto.query.ClinicalQueryDTO; import gov.nih.nci.caintegrator.dto.query.QueryType; import gov.nih.nci.caintegrator.enumeration.MultiGroupComparisonAdjustmentType; import gov.nih.nci.caintegrator.enumeration.Operator; import gov.nih.nci.caintegrator.enumeration.StatisticalMethodType; import gov.nih.nci.caintegrator.enumeration.StatisticalSignificanceType; import gov.nih.nci.caintegrator.exceptions.FrameworkException; import gov.nih.nci.caintegrator.security.UserCredentials; import gov.nih.nci.caintegrator.service.findings.Finding; import gov.nih.nci.rembrandt.dto.query.PatientUserListQueryDTO; import gov.nih.nci.rembrandt.cache.RembrandtPresentationTierCache; import gov.nih.nci.rembrandt.dto.query.ClinicalDataQuery; import gov.nih.nci.rembrandt.service.findings.RembrandtFindingsFactory; import gov.nih.nci.rembrandt.util.RembrandtConstants; import gov.nih.nci.rembrandt.web.bean.SessionQueryBag; import gov.nih.nci.rembrandt.web.factory.ApplicationFactory; import gov.nih.nci.rembrandt.web.helper.GroupRetriever; import gov.nih.nci.rembrandt.web.helper.EnumCaseChecker; import gov.nih.nci.rembrandt.web.helper.SampleBasedQueriesRetriever; import gov.nih.nci.rembrandt.web.struts.form.ClassComparisonForm; import java.util.ArrayList; import java.util.List; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.servlet.http.HttpSession; import org.apache.log4j.Logger; import org.apache.struts.action.ActionForm; import org.apache.struts.action.ActionForward; import org.apache.struts.action.ActionMapping; import org.apache.struts.actions.DispatchAction; /** * caIntegrator License * * Copyright 2001-2005 Science Applications International Corporation ("SAIC"). * The software subject to this notice and license includes both human readable source code form and machine readable, * binary, object code form ("the caIntegrator Software"). The caIntegrator Software was developed in conjunction with * the National Cancer Institute ("NCI") by NCI employees and employees of SAIC. * To the extent government employees are authors, any rights in such works shall be subject to Title 17 of the United States * Code, section 105. * This caIntegrator Software License (the "License") is between NCI and You. "You (or "Your") shall mean a person or an * entity, and all other entities that control, are controlled by, or are under common control with the entity. "Control" * for purposes of this definition means (i) the direct or indirect power to cause the direction or management of such entity, * whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) * beneficial ownership of such entity. * This License is granted provided that You agree to the conditions described below. 
NCI grants You a non-exclusive, * worldwide, perpetual, fully-paid-up, no-charge, irrevocable, transferable and royalty-free right and license in its rights * in the caIntegrator Software to (i) use, install, access, operate, execute, copy, modify, translate, market, publicly * display, publicly perform, and prepare derivative works of the caIntegrator Software; (ii) distribute and have distributed * to and by third parties the caIntegrator Software and any modifications and derivative works thereof; * and (iii) sublicense the foregoing rights set out in (i) and (ii) to third parties, including the right to license such * rights to further third parties. For sake of clarity, and not by way of limitation, NCI shall have no right of accounting * or right of payment from You or Your sublicensees for the rights granted under this License. This License is granted at no * charge to You. * 1. Your redistributions of the source code for the Software must retain the above copyright notice, this list of conditions * and the disclaimer and limitation of liability of Article 6, below. Your redistributions in object code form must reproduce * the above copyright notice, this list of conditions and the disclaimer of Article 6 in the documentation and/or other materials * provided with the distribution, if any. * 2. Your end-user documentation included with the redistribution, if any, must include the following acknowledgment: "This * product includes software developed by SAIC and the National Cancer Institute." If You do not include such end-user * documentation, You shall include this acknowledgment in the Software itself, wherever such third-party acknowledgments * normally appear. * 3. You may not use the names "The National Cancer Institute", "NCI" "Science Applications International Corporation" and * "SAIC" to endorse or promote products derived from this Software. This License does not authorize You to use any * trademarks, service marks, trade names, logos or product names of either NCI or SAIC, except as required to comply with * the terms of this License. * 4. For sake of clarity, and not by way of limitation, You may incorporate this Software into Your proprietary programs and * into any third party proprietary programs. However, if You incorporate the Software into third party proprietary * programs, You agree that You are solely responsible for obtaining any permission from such third parties required to * incorporate the Software into such third party proprietary programs and for informing Your sublicensees, including * without limitation Your end-users, of their obligation to secure any required permissions from such third parties * before incorporating the Software into such third party proprietary software programs. In the event that You fail * to obtain such permissions, You agree to indemnify NCI for any claims against NCI by such third parties, except to * the extent prohibited by law, resulting from Your failure to obtain such permissions. * 5. For sake of clarity, and not by way of limitation, You may add Your own copyright statement to Your modifications and * to the derivative works, and You may provide additional or different license terms and conditions in Your sublicenses * of modifications of the Software, or any derivative works of the Software as a whole, provided Your use, reproduction, * and distribution of the Work otherwise complies with the conditions stated in this License. * 6. 
THIS SOFTWARE IS PROVIDED "AS IS," AND ANY EXPRESSED OR IMPLIED WARRANTIES, (INCLUDING, BUT NOT LIMITED TO, * THE IMPLIED WARRANTIES OF MERCHANTABILITY, NON-INFRINGEMENT AND FITNESS FOR A PARTICULAR PURPOSE) ARE DISCLAIMED. * IN NO EVENT SHALL THE NATIONAL CANCER INSTITUTE, SAIC, OR THEIR AFFILIATES BE LIABLE FOR ANY DIRECT, INDIRECT, * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE * GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. * */ public class ClassComparisonAction extends DispatchAction { private UserCredentials credentials; private static Logger logger = Logger.getLogger(ClassComparisonAction.class); private RembrandtPresentationTierCache presentationTierCache = ApplicationFactory.getPresentationTierCache(); /*** * These are the default error values used when an invalid enum type * parameter has been passed to the action. These default values should * be verified as useful in all cases. */ private MultiGroupComparisonAdjustmentType ERROR_MULTI_GROUP_COMPARE_ADJUSTMENT_TYPE = MultiGroupComparisonAdjustmentType.FWER; private StatisticalMethodType ERROR_STATISTICAL_METHOD_TYPE = StatisticalMethodType.TTest; /** * Method submittal * * @param ActionMapping * mapping * @param ActionForm * form * @param HttpServletRequest * request * @param HttpServletResponse * response * @return ActionForward * @throws Exception */ public ActionForward submit(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception { ClassComparisonForm classComparisonForm = (ClassComparisonForm) form; String sessionId = request.getSession().getId(); ClassComparisonQueryDTO classComparisonQueryDTO = createClassComparisonQueryDTO(classComparisonForm,request.getSession()); /*Create the InstituteDEs using credentials from the local session. * May want to put these in the cache eventually. 
*/ if(request.getSession().getAttribute(RembrandtConstants.USER_CREDENTIALS)!=null){ credentials = (UserCredentials) request.getSession().getAttribute(RembrandtConstants.USER_CREDENTIALS); classComparisonQueryDTO.setInstitutionDEs(credentials.getInstitutes()); } if (classComparisonQueryDTO!=null) { SessionQueryBag queryBag = presentationTierCache.getSessionQueryBag(sessionId); queryBag.putQueryDTO(classComparisonQueryDTO, classComparisonForm); presentationTierCache.putSessionQueryBag(sessionId, queryBag); } RembrandtFindingsFactory factory = new RembrandtFindingsFactory(); Finding finding = null; try { finding = factory.createClassComparisonFinding(classComparisonQueryDTO,sessionId,classComparisonQueryDTO.getQueryName()); } catch (FrameworkException e) { e.printStackTrace(); } return mapping.findForward("viewResults"); } public ActionForward setup(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception { ClassComparisonForm classComparisonForm = (ClassComparisonForm) form; /*setup the defined Disease query names and the list of samples selected from a Resultset*/ GroupRetriever groupRetriever = new GroupRetriever(); classComparisonForm.setExistingGroupsList(groupRetriever.getClinicalGroupsCollection(request.getSession())); return mapping.findForward("backToClassComparison"); } private ClassComparisonQueryDTO createClassComparisonQueryDTO(ClassComparisonForm classComparisonQueryForm, HttpSession session){ ClassComparisonQueryDTO classComparisonQueryDTO = (ClassComparisonQueryDTO)ApplicationFactory.newQueryDTO(QueryType.CLASS_COMPARISON_QUERY); classComparisonQueryDTO.setQueryName(classComparisonQueryForm.getAnalysisResultName()); //Create the clinical query DTO collection from the selected groups in the form List<ClinicalQueryDTO> clinicalQueryCollection = new ArrayList<ClinicalQueryDTO>(); if(classComparisonQueryForm.getSelectedGroups() != null && classComparisonQueryForm.getSelectedGroups().length == 2 ){ for(int i=0; i<classComparisonQueryForm.getSelectedGroups().length; i++){ // //lets ensure the that the baseline is added last // if(!classComparisonQueryForm.getSelectedGroups()[i].equals(classComparisonQueryForm.getBaselineGroup())) { // clinicalDataQuery = sampleBasedQueriesRetriever.getQuery(sessionId, classComparisonQueryForm.getSelectedGroups()[i]); // //add logic to if there is no predefined query.. 
use the given samples from the user // // //bag and construct a clinical query to add into the collection // clinicalQueryCollection.add(clinicalDataQuery); // } // } // //now process the baseline // clinicalDataQuery = sampleBasedQueriesRetriever.getQuery(sessionId, classComparisonQueryForm.getBaselineGroup()); // clinicalQueryCollection.add(clinicalDataQuery); /* * parse the selected groups, create the appropriate EnumType and add it * to its respective EnumSet */ String[] uiDropdownString = classComparisonQueryForm.getSelectedGroups()[i].split("#"); String myClassName = uiDropdownString[0]; String myValueName = uiDropdownString[1]; Class myClass = ClassHelper.createClass(myClassName); if(myClass.isInstance(new UserList())){ PatientUserListQueryDTO patientQueryDTO = new PatientUserListQueryDTO(session,myValueName); clinicalQueryCollection.add(patientQueryDTO); if(i==1){//the second group is always baseline patientQueryDTO.setBaseline(true); } } } classComparisonQueryDTO.setComparisonGroups(clinicalQueryCollection); } //Create the foldChange DEs if (classComparisonQueryForm.getFoldChange().equals("list")){ UpRegulation exprFoldChangeDE = new UpRegulation(new Float(classComparisonQueryForm.getFoldChangeAuto())); classComparisonQueryDTO.setExprFoldChangeDE(exprFoldChangeDE); } if (classComparisonQueryForm.getFoldChange().equals("specify")){ UpRegulation exprFoldChangeDE = new UpRegulation(new Float(classComparisonQueryForm.getFoldChangeManual())); classComparisonQueryDTO.setExprFoldChangeDE(exprFoldChangeDE); } //Create arrayPlatfrom DEs if(classComparisonQueryForm.getArrayPlatform() != "" || classComparisonQueryForm.getArrayPlatform().length() != 0){ ArrayPlatformDE arrayPlatformDE = new ArrayPlatformDE(classComparisonQueryForm.getArrayPlatform()); classComparisonQueryDTO.setArrayPlatformDE(arrayPlatformDE); } //Create class comparison DEs /* * This following code is here to deal with an observed problem with the changing * of case in request parameters. See the class EnumCaseChecker for * enlightenment. 
*/ MultiGroupComparisonAdjustmentType mgAdjustmentType; String multiGroupComparisonAdjustmentTypeString= EnumCaseChecker.getEnumTypeName(classComparisonQueryForm.getComparisonAdjustment(),MultiGroupComparisonAdjustmentType.values()); if(multiGroupComparisonAdjustmentTypeString!=null) { mgAdjustmentType = MultiGroupComparisonAdjustmentType.valueOf(multiGroupComparisonAdjustmentTypeString); }else { logger.error("Invalid MultiGroupComparisonAdjustmentType value given in request"); logger.error("Selected MultiGroupComparisonAdjustmentType value = "+classComparisonQueryForm.getComparisonAdjustment()); logger.error("Using the default MultiGroupComparisonAdjustmentType value = "+ERROR_MULTI_GROUP_COMPARE_ADJUSTMENT_TYPE); mgAdjustmentType = ERROR_MULTI_GROUP_COMPARE_ADJUSTMENT_TYPE; } MultiGroupComparisonAdjustmentTypeDE multiGroupComparisonAdjustmentTypeDE = new MultiGroupComparisonAdjustmentTypeDE(mgAdjustmentType); ; if(!classComparisonQueryForm.getComparisonAdjustment().equalsIgnoreCase("NONE")){ StatisticalSignificanceDE statisticalSignificanceDE = new StatisticalSignificanceDE(classComparisonQueryForm.getStatisticalSignificance(),Operator.LE,StatisticalSignificanceType.adjustedpValue); classComparisonQueryDTO.setMultiGroupComparisonAdjustmentTypeDE(multiGroupComparisonAdjustmentTypeDE); classComparisonQueryDTO.setStatisticalSignificanceDE(statisticalSignificanceDE); } else{ StatisticalSignificanceDE statisticalSignificanceDE = new StatisticalSignificanceDE(classComparisonQueryForm.getStatisticalSignificance(),Operator.LE,StatisticalSignificanceType.pValue); classComparisonQueryDTO.setMultiGroupComparisonAdjustmentTypeDE(multiGroupComparisonAdjustmentTypeDE); classComparisonQueryDTO.setStatisticalSignificanceDE(statisticalSignificanceDE); } if(classComparisonQueryForm.getStatisticalMethod() != "" || classComparisonQueryForm.getStatisticalMethod().length() != 0){ /* * This following code is here to deal with an observed problem with the changing * of case in request parameters. See the class EnumCaseChecker for * enlightenment. */ StatisticalMethodType statisticalMethodType; String statisticalMethodTypeString= EnumCaseChecker.getEnumTypeName(classComparisonQueryForm.getStatisticalMethod(),StatisticalMethodType.values()); if(statisticalMethodTypeString!=null) { statisticalMethodType = StatisticalMethodType.valueOf(statisticalMethodTypeString); }else { logger.error("Invalid StatisticalMethodType value given in request"); logger.error("Selected StatisticalMethodType value = "+classComparisonQueryForm.getStatisticalMethod()); logger.error("Using the default StatisticalMethodType type of :"+ERROR_STATISTICAL_METHOD_TYPE); statisticalMethodType = ERROR_STATISTICAL_METHOD_TYPE; } StatisticTypeDE statisticTypeDE = new StatisticTypeDE(statisticalMethodType); classComparisonQueryDTO.setStatisticTypeDE(statisticTypeDE); } return classComparisonQueryDTO; } }
Modified code to make baseline setting specific to non-FTest. SVN-Revision: 2406
src/gov/nih/nci/rembrandt/web/struts/action/ClassComparisonAction.java
Modified code to make baseline setting specific to non-FTest.
<ide><path>src/gov/nih/nci/rembrandt/web/struts/action/ClassComparisonAction.java
<ide> PatientUserListQueryDTO patientQueryDTO = new PatientUserListQueryDTO(session,myValueName);
<ide> clinicalQueryCollection.add(patientQueryDTO);
<ide> if(i==1){//the second group is always baseline
<del> patientQueryDTO.setBaseline(true);
<add> //to set baseline only when the statistical method
<add> //is not FTest
<add> if(!"FTest".equals(classComparisonQueryForm.getStatisticalMethod()))
<add> patientQueryDTO.setBaseline(true);
<ide> }
<ide> }
<ide> }
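The change recorded above guards the baseline flag so it is only set when the statistical method is not an F-test; an F-test compares all groups symmetrically, so no single baseline group applies. A minimal, self-contained Java sketch of that pattern follows; the Group class and the method names are hypothetical illustrations, not the Rembrandt/caIntegrator API.

// Illustrative sketch only (hypothetical names, not the Rembrandt/caIntegrator API):
// the baseline flag is applied to the last selected group, but skipped for an F-test,
// which treats all comparison groups symmetrically.
import java.util.ArrayList;
import java.util.List;

public class BaselineSelectionSketch {

    static class Group {
        final String name;
        boolean baseline;
        Group(String name) { this.name = name; }
    }

    static List<Group> buildGroups(String[] selected, String statisticalMethod) {
        List<Group> groups = new ArrayList<>();
        for (int i = 0; i < selected.length; i++) {
            Group group = new Group(selected[i]);
            boolean isLast = (i == selected.length - 1);
            // Only baseline-oriented tests (e.g. a two-group t-test) need a designated baseline.
            if (isLast && !"FTest".equals(statisticalMethod)) {
                group.baseline = true;
            }
            groups.add(group);
        }
        return groups;
    }

    public static void main(String[] args) {
        String[] selected = {"groupA", "groupB"};
        for (Group g : buildGroups(selected, "TTest")) {
            System.out.println("TTest  " + g.name + " baseline=" + g.baseline);
        }
        for (Group g : buildGroups(selected, "FTest")) {
            System.out.println("FTest  " + g.name + " baseline=" + g.baseline);
        }
    }
}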
Java
apache-2.0
56305e9807251882b664682da86b4d38a7c9946c
0
Hipparchus-Math/hipparchus,Hipparchus-Math/hipparchus,apache/commons-math,apache/commons-math,sdinot/hipparchus,apache/commons-math,Hipparchus-Math/hipparchus,sdinot/hipparchus,sdinot/hipparchus,Hipparchus-Math/hipparchus,sdinot/hipparchus,apache/commons-math
/* * Copyright 2003-2005 The Apache Software Foundation. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.commons.math.distribution; /** * Test cases for TDistribution. * Extends ContinuousDistributionAbstractTest. See class javadoc for * ContinuousDistributionAbstractTest for details. * * @version $Revision: 1.15 $ $Date$ */ public class TDistributionTest extends ContinuousDistributionAbstractTest { /** * Constructor for TDistributionTest. * @param name */ public TDistributionTest(String name) { super(name); } //-------------- Implementations for abstract methods ----------------------- /** Creates the default continuous distribution instance to use in tests. */ public ContinuousDistribution makeDistribution() { return DistributionFactory.newInstance().createTDistribution(5.0); } /** Creates the default cumulative probability distribution test input values */ public double[] makeCumulativeTestPoints() { // quantiles computed using R version 1.8.1 (linux version) return new double[] {-5.89343,-3.36493, -2.570582, -2.015048, -1.475884, 0.0, 5.89343, 3.36493, 2.570582, 2.015048, 1.475884}; } /** Creates the default cumulative probability density test expected values */ public double[] makeCumulativeTestValues() { return new double[] {0.001d, 0.01d, 0.025d, 0.05d, 0.1d, 0.5d, 0.999d, 0.990d, 0.975d, 0.950d, 0.900d}; } // --------------------- Override tolerance -------------- protected void setup() throws Exception { super.setUp(); setTolerance(1E-6); } //---------------------------- Additional test cases ------------------------- /** * @see <a href="http://issues.apache.org/bugzilla/show_bug.cgi?id=27243"> * Bug report that prompted this unit test.</a> */ public void testCumulativeProbabilityAgaintStackOverflow() throws Exception { TDistributionImpl td = new TDistributionImpl(5.); double est; est = td.cumulativeProbability(.1); est = td.cumulativeProbability(.01); } public void testSmallDf() throws Exception { setDistribution(DistributionFactory.newInstance().createTDistribution(1d)); setTolerance(1E-4); // quantiles computed using R version 1.8.1 (linux version) setCumulativeTestPoints(new double[] {-318.3088, -31.82052, -12.70620, -6.313752, -3.077684, 0.0, 318.3088, 31.82052, 12.70620, 6.313752, 3.077684}); setInverseCumulativeTestValues(getCumulativeTestPoints()); verifyCumulativeProbabilities(); verifyInverseCumulativeProbabilities(); } public void testInverseCumulativeProbabilityExtremes() throws Exception { setInverseCumulativeTestPoints(new double[] {0, 1}); setInverseCumulativeTestValues( new double[] {Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY}); verifyInverseCumulativeProbabilities(); } public void testDfAccessors() { TDistribution distribution = (TDistribution) getDistribution(); assertEquals(5d, distribution.getDegreesOfFreedom(), Double.MIN_VALUE); distribution.setDegreesOfFreedom(4d); assertEquals(4d, distribution.getDegreesOfFreedom(), Double.MIN_VALUE); try { distribution.setDegreesOfFreedom(0d); fail("Expecting IllegalArgumentException for df = 0"); } catch 
(IllegalArgumentException ex) { // expected } } }
src/test/org/apache/commons/math/distribution/TDistributionTest.java
/* * Copyright 2003-2005 The Apache Software Foundation. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.commons.math.distribution; /** * Test cases for TDistribution. * Extends ContinuousDistributionAbstractTest. See class javadoc for * ContinuousDistributionAbstractTest for details. * * @version $Revision: 1.15 $ $Date$ */ public class TDistributionTest extends ContinuousDistributionAbstractTest { /** * Constructor for TDistributionTest. * @param name */ public TDistributionTest(String name) { super(name); } //-------------- Implementations for abstract methods ----------------------- /** Creates the default continuous distribution instance to use in tests. */ public ContinuousDistribution makeDistribution() { return DistributionFactory.newInstance().createTDistribution(5.0); } /** Creates the default cumulative probability distribution test input values */ public double[] makeCumulativeTestPoints() { // quantiles computed using R version 1.8.1 (linux version) return new double[] {-5.89343,-3.36493, -2.570582, -2.015048, -1.475884, 0.0, 5.89343, 3.36493, 2.570582, 2.015048, 1.475884}; } /** Creates the default cumulative probability density test expected values */ public double[] makeCumulativeTestValues() { return new double[] {0.001d, 0.01d, 0.025d, 0.05d, 0.1d, 0.0d, 0.999d, 0.990d, 0.975d, 0.950d, 0.900d}; } // --------------------- Override tolerance -------------- protected void setup() throws Exception { super.setUp(); setTolerance(1E-6); } //---------------------------- Additional test cases ------------------------- /** * @see <a href="http://issues.apache.org/bugzilla/show_bug.cgi?id=27243"> * Bug report that prompted this unit test.</a> */ public void testCumulativeProbabilityAgaintStackOverflow() throws Exception { TDistributionImpl td = new TDistributionImpl(5.); double est; est = td.cumulativeProbability(.1); est = td.cumulativeProbability(.01); } public void testSmallDf() throws Exception { setDistribution(DistributionFactory.newInstance().createTDistribution(1d)); setTolerance(1E-4); // quantiles computed using R version 1.8.1 (linux version) setCumulativeTestPoints(new double[] {-318.3088, -31.82052, -12.70620, -6.313752, -3.077684, 318.3088, 31.82052, 12.70620, 6.313752, 3.077684}); setInverseCumulativeTestValues(getCumulativeTestPoints()); verifyCumulativeProbabilities(); verifyInverseCumulativeProbabilities(); } public void testInverseCumulativeProbabilityExtremes() throws Exception { setInverseCumulativeTestPoints(new double[] {0, 1}); setInverseCumulativeTestValues( new double[] {Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY}); verifyInverseCumulativeProbabilities(); } public void testDfAccessors() { TDistribution distribution = (TDistribution) getDistribution(); assertEquals(5d, distribution.getDegreesOfFreedom(), Double.MIN_VALUE); distribution.setDegreesOfFreedom(4d); assertEquals(4d, distribution.getDegreesOfFreedom(), Double.MIN_VALUE); try { distribution.setDegreesOfFreedom(0d); fail("Expecting IllegalArgumentException for df = 0"); } catch 
(IllegalArgumentException ex) { // expected } } }
fixed incorrect test data that was causing test failures. git-svn-id: ba027325f5cbd8d0def94cfab53a7170165593ea@155246 13f79535-47bb-0310-9956-ffa450edef68
src/test/org/apache/commons/math/distribution/TDistributionTest.java
fixed incorrect test data that was causing test failures.
<ide><path>src/test/org/apache/commons/math/distribution/TDistributionTest.java
<ide>
<ide> /** Creates the default cumulative probability density test expected values */
<ide> public double[] makeCumulativeTestValues() {
<del> return new double[] {0.001d, 0.01d, 0.025d, 0.05d, 0.1d, 0.0d, 0.999d,
<add> return new double[] {0.001d, 0.01d, 0.025d, 0.05d, 0.1d, 0.5d, 0.999d,
<ide> 0.990d, 0.975d, 0.950d, 0.900d};
<ide> }
<ide>
<ide> setTolerance(1E-4);
<ide> // quantiles computed using R version 1.8.1 (linux version)
<ide> setCumulativeTestPoints(new double[] {-318.3088, -31.82052, -12.70620, -6.313752,
<del> -3.077684, 318.3088, 31.82052, 12.70620,
<add> -3.077684, 0.0, 318.3088, 31.82052, 12.70620,
<ide> 6.313752, 3.077684});
<ide> setInverseCumulativeTestValues(getCumulativeTestPoints());
<ide> verifyCumulativeProbabilities();
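The corrected expected value follows from the symmetry of Student's t distribution: for any degrees of freedom, the cumulative probability at t = 0 is 0.5, not 0.0, which is also why 0.0 belongs among the quantile test points. A small standalone Monte Carlo check of that property is sketched below; it uses only java.util.Random, and the class and method names are hypothetical rather than part of Commons Math.

// Illustrative sketch only (hypothetical names, not part of Commons Math):
// Monte Carlo check that P(T <= 0) = 0.5 for Student's t, using the construction
// T = Z / sqrt(chiSquare(df) / df) with independent standard normals.
import java.util.Random;

public class TDistributionSymmetrySketch {

    static double sampleT(int df, Random rnd) {
        double z = rnd.nextGaussian();
        double chiSq = 0.0;
        for (int i = 0; i < df; i++) {
            double n = rnd.nextGaussian();
            chiSq += n * n; // chi-square(df) as a sum of df squared standard normals
        }
        return z / Math.sqrt(chiSq / df);
    }

    public static void main(String[] args) {
        Random rnd = new Random(42);
        int df = 5;
        int samples = 1_000_000;
        int atOrBelowZero = 0;
        for (int i = 0; i < samples; i++) {
            if (sampleT(df, rnd) <= 0.0) {
                atOrBelowZero++;
            }
        }
        // Prints an estimate very close to 0.5, reflecting the symmetry about 0.
        System.out.println("Estimated P(T <= 0) = " + (double) atOrBelowZero / samples);
    }
}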
Java
apache-2.0
c2dbc0aaeb98403db25dd97f951676114b7c6403
0
ldbc/ldbc_graphalytics,ldbc/ldbc_graphalytics,tudelft-atlarge/graphalytics,tudelft-atlarge/graphalytics,tudelft-atlarge/graphalytics,ldbc/ldbc_graphalytics,tudelft-atlarge/graphalytics,ldbc/ldbc_graphalytics,tudelft-atlarge/graphalytics,ldbc/ldbc_graphalytics
/* * Copyright 2015 Delft University of Technology * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package nl.tudelft.graphalytics; import java.io.IOException; import java.nio.file.Files; import java.nio.file.Paths; import nl.tudelft.graphalytics.network.ExecutorService; import nl.tudelft.graphalytics.util.TimeUtility; import org.apache.commons.configuration.Configuration; import org.apache.commons.configuration.ConfigurationException; import org.apache.commons.configuration.PropertiesConfiguration; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import nl.tudelft.graphalytics.domain.Benchmark; import nl.tudelft.graphalytics.domain.BenchmarkResult; import nl.tudelft.graphalytics.domain.BenchmarkSuite; import nl.tudelft.graphalytics.domain.BenchmarkSuiteResult; import nl.tudelft.graphalytics.domain.BenchmarkSuiteResult.BenchmarkSuiteResultBuilder; import nl.tudelft.graphalytics.domain.Graph; import nl.tudelft.graphalytics.domain.GraphSet; import nl.tudelft.graphalytics.domain.NestedConfiguration; import nl.tudelft.graphalytics.domain.SystemDetails; import nl.tudelft.graphalytics.plugin.Plugins; import nl.tudelft.graphalytics.util.GraphFileManager; /** * Helper class for executing all benchmarks in a BenchmarkSuite on a specific Platform. * * @author Tim Hegeman */ public class BenchmarkSuiteExecutor { private static final Logger LOG = LogManager.getLogger(); private ExecutorService service; public static final String BENCHMARK_PROPERTIES_FILE = "benchmark.properties"; private final BenchmarkSuite benchmarkSuite; private final Platform platform; private final Plugins plugins; private final int timeoutDuration; /** * @param benchmarkSuite the suite of benchmarks to run * @param platform the platform instance to run the benchmarks on * @param plugins collection of loaded plugins */ public BenchmarkSuiteExecutor(BenchmarkSuite benchmarkSuite, Platform platform, Plugins plugins) { this.benchmarkSuite = benchmarkSuite; this.platform = platform; this.plugins = plugins; try { Configuration benchmarkConf = new PropertiesConfiguration(BENCHMARK_PROPERTIES_FILE); timeoutDuration = benchmarkConf.getInt("benchmark.run.timeout"); } catch (ConfigurationException e) { e.printStackTrace(); throw new IllegalStateException("Failed to load configurations from " + BENCHMARK_PROPERTIES_FILE); } // Init the executor service; ExecutorService.InitService(this); } /** * Executes the Graphalytics benchmark suite on the given platform. The benchmarks are grouped by graph so that each * graph is uploaded to the platform exactly once. After executing all benchmarks for a specific graph, the graph * is deleted from the platform. 
* * @return a BenchmarkSuiteResult object containing the gathered benchmark results and details */ public BenchmarkSuiteResult execute() { // TODO: Retrieve configuration for system, platform, and platform per benchmark // Use a BenchmarkSuiteResultBuilder to track the benchmark results gathered throughout execution BenchmarkSuiteResultBuilder benchmarkSuiteResultBuilder = new BenchmarkSuiteResultBuilder(benchmarkSuite); long totalStartTime = System.currentTimeMillis(); int finishedBenchmark = 0; int numBenchmark = benchmarkSuite.getBenchmarks().size(); LOG.info(""); LOG.info(String.format("This benchmark suite consists of %s benchmarks in total.", numBenchmark)); for (GraphSet graphSet : benchmarkSuite.getGraphSets()) { for (Graph graph : graphSet.getGraphs()) { LOG.debug(String.format("Preparing for %s benchmark runs that use graph %s.", benchmarkSuite.getBenchmarksForGraph(graph).size(), graph.getName())); LOG.info(""); LOG.info(String.format("=======Start of Upload Graph %s =======", graph.getName())); // Skip the graph if there are no benchmarks to run on it if (benchmarkSuite.getBenchmarksForGraph(graph).isEmpty()) { continue; } // Ensure that the graph input files exist (i.e. generate them from the GraphSet sources if needed) try { GraphFileManager.ensureGraphFilesExist(graph); } catch (IOException ex) { LOG.error("Can not ensure that graph \"" + graph.getName() + "\" exists, skipping.", ex); continue; } // Upload the graph try { platform.uploadGraph(graph); } catch (Exception ex) { LOG.error("Failed to upload graph \"" + graph.getName() + "\", skipping.", ex); continue; } LOG.info(String.format("=======End of Upload Graph %s =======", graph.getName())); LOG.info(""); // Execute all benchmarks for this graph for (Benchmark benchmark : benchmarkSuite.getBenchmarksForGraph(graph)) { // Ensure that the output directory exists, if needed if (benchmark.isOutputRequired()) { try { Files.createDirectories(Paths.get(benchmark.getOutputPath()).getParent()); } catch (IOException e) { LOG.error("Failed to create output directory \"" + Paths.get(benchmark.getOutputPath()).getParent() + "\", skipping.", e); continue; } } String benchmarkText = String.format("%s:\"%s on %s\"", benchmark.getId(), benchmark.getAlgorithm().getAcronym(), graphSet.getName()); LOG.info(""); LOG.info(String.format("=======Start of Benchmark %s [%s/%s]=======", benchmark.getId(), finishedBenchmark + 1, numBenchmark)); // Execute the pre-benchmark steps of all plugins plugins.preBenchmark(benchmark); LOG.info(String.format("Benchmark %s started.", benchmarkText)); Process process = BenchmarkRunner.InitializeJvmProcess(platform.getName(), benchmark.getId()); BenchmarkRunnerInfo runnerInfo = new BenchmarkRunnerInfo(benchmark, process); ExecutorService.runnerInfos.put(benchmark.getId(), runnerInfo); // wait for runner to get started. long waitingStarted; LOG.info("Initializing benchmark runner..."); waitingStarted = System.currentTimeMillis(); while (!runnerInfo.isRegistered()) { if(System.currentTimeMillis() - waitingStarted > 10 * 1000) { LOG.error("There is no response from the benchmark runner. Benchmark run failed."); break; } else { TimeUtility.waitFor(1); } } LOG.info("The benchmark runner is initialized."); LOG.info("Running benchmark..."); LOG.info("Benchmark logs at: \"" + benchmark.getLogPath() +"\"."); LOG.info("Waiting for completion... 
(Timeout after " + timeoutDuration + " seconds)"); waitingStarted = System.currentTimeMillis(); while (!runnerInfo.isCompleted()) { if(System.currentTimeMillis() - waitingStarted > timeoutDuration * 1000) { LOG.error("Timeout is reached. This benchmark run is skipped."); break; } else { TimeUtility.waitFor(1); } } BenchmarkRunner.TerminateJvmProcess(process); BenchmarkResult benchmarkResult = runnerInfo.getBenchmarkResult(); if(benchmarkResult != null) { benchmarkSuiteResultBuilder.withBenchmarkResult(benchmarkResult); long makespan = (benchmarkResult.getEndOfBenchmark().getTime() - benchmarkResult.getStartOfBenchmark().getTime()); LOG.info(String.format("Benchmark %s %s (completed: %s, validated: %s), which took: %s ms.", benchmark.getId(), benchmarkResult.isSuccessful() ? "succeed" : "failed", benchmarkResult.isCompleted(), benchmarkResult.isValidated(), makespan)); } else { benchmarkSuiteResultBuilder.withoutBenchmarkResult(benchmark); LOG.info(String.format("Benchmark %s %s (completed: %s, validated: %s).", benchmark.getId(), "failed", false, false)); } LOG.info(String.format("Benchmark %s ended.", benchmarkText)); // Execute the post-benchmark steps of all plugins LOG.info(String.format("Cleaning up %s.", benchmarkText)); platform.cleanup(benchmark); plugins.postBenchmark(benchmark, benchmarkResult); finishedBenchmark++; LOG.info(String.format("=======End of Benchmark %s [%s/%s]=======", benchmark.getId(), finishedBenchmark, numBenchmark)); LOG.info(""); LOG.info(""); } // Delete the graph platform.deleteGraph(graph.getName()); } } service.terminate(); long totalEndTime = System.currentTimeMillis(); long totalDuration = totalEndTime - totalStartTime; // Dump the used configuration NestedConfiguration benchmarkConfiguration = NestedConfiguration.empty(); try { Configuration configuration = new PropertiesConfiguration("benchmark.properties"); benchmarkConfiguration = NestedConfiguration.fromExternalConfiguration(configuration, "benchmark.properties"); } catch (ConfigurationException e) { // Already reported during loading of benchmark } // Construct the BenchmarkSuiteResult return benchmarkSuiteResultBuilder.buildFromConfiguration(SystemDetails.empty(), benchmarkConfiguration, platform.getPlatformConfiguration(), totalDuration); } public void setService(ExecutorService service) { this.service = service; } }
graphalytics-core/src/main/java/nl/tudelft/graphalytics/BenchmarkSuiteExecutor.java
/* * Copyright 2015 Delft University of Technology * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package nl.tudelft.graphalytics; import java.io.IOException; import java.nio.file.Files; import java.nio.file.Paths; import nl.tudelft.graphalytics.network.ExecutorService; import nl.tudelft.graphalytics.util.TimeUtility; import org.apache.commons.configuration.Configuration; import org.apache.commons.configuration.ConfigurationException; import org.apache.commons.configuration.PropertiesConfiguration; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import nl.tudelft.graphalytics.domain.Benchmark; import nl.tudelft.graphalytics.domain.BenchmarkResult; import nl.tudelft.graphalytics.domain.BenchmarkSuite; import nl.tudelft.graphalytics.domain.BenchmarkSuiteResult; import nl.tudelft.graphalytics.domain.BenchmarkSuiteResult.BenchmarkSuiteResultBuilder; import nl.tudelft.graphalytics.domain.Graph; import nl.tudelft.graphalytics.domain.GraphSet; import nl.tudelft.graphalytics.domain.NestedConfiguration; import nl.tudelft.graphalytics.domain.SystemDetails; import nl.tudelft.graphalytics.plugin.Plugins; import nl.tudelft.graphalytics.util.GraphFileManager; /** * Helper class for executing all benchmarks in a BenchmarkSuite on a specific Platform. * * @author Tim Hegeman */ public class BenchmarkSuiteExecutor { private static final Logger LOG = LogManager.getLogger(); private ExecutorService service; public static final String BENCHMARK_PROPERTIES_FILE = "benchmark.properties"; private final BenchmarkSuite benchmarkSuite; private final Platform platform; private final Plugins plugins; private final int timeoutDuration; /** * @param benchmarkSuite the suite of benchmarks to run * @param platform the platform instance to run the benchmarks on * @param plugins collection of loaded plugins */ public BenchmarkSuiteExecutor(BenchmarkSuite benchmarkSuite, Platform platform, Plugins plugins) { this.benchmarkSuite = benchmarkSuite; this.platform = platform; this.plugins = plugins; try { Configuration benchmarkConf = new PropertiesConfiguration(BENCHMARK_PROPERTIES_FILE); timeoutDuration = benchmarkConf.getInt("benchmark.run.timeout"); } catch (ConfigurationException e) { e.printStackTrace(); throw new IllegalStateException("Failed to load configurations from " + BENCHMARK_PROPERTIES_FILE); } // Init the executor service; ExecutorService.InitService(this); } /** * Executes the Graphalytics benchmark suite on the given platform. The benchmarks are grouped by graph so that each * graph is uploaded to the platform exactly once. After executing all benchmarks for a specific graph, the graph * is deleted from the platform. 
* * @return a BenchmarkSuiteResult object containing the gathered benchmark results and details */ public BenchmarkSuiteResult execute() { // TODO: Retrieve configuration for system, platform, and platform per benchmark // Use a BenchmarkSuiteResultBuilder to track the benchmark results gathered throughout execution BenchmarkSuiteResultBuilder benchmarkSuiteResultBuilder = new BenchmarkSuiteResultBuilder(benchmarkSuite); long totalStartTime = System.currentTimeMillis(); int finishedBenchmark = 0; int numBenchmark = benchmarkSuite.getBenchmarks().size(); LOG.info(""); LOG.info(String.format("This benchmark suite consists of %s benchmarks in total.", numBenchmark)); for (GraphSet graphSet : benchmarkSuite.getGraphSets()) { for (Graph graph : graphSet.getGraphs()) { LOG.debug(String.format("Preparing for %s benchmark runs that use graph %s.", benchmarkSuite.getBenchmarksForGraph(graph).size(), graph.getName())); LOG.info(""); LOG.info(String.format("=======Start of Upload Graph %s =======", graph.getName())); // Skip the graph if there are no benchmarks to run on it if (benchmarkSuite.getBenchmarksForGraph(graph).isEmpty()) { continue; } // Ensure that the graph input files exist (i.e. generate them from the GraphSet sources if needed) try { GraphFileManager.ensureGraphFilesExist(graph); } catch (IOException ex) { LOG.error("Can not ensure that graph \"" + graph.getName() + "\" exists, skipping.", ex); continue; } // Upload the graph try { platform.uploadGraph(graph); } catch (Exception ex) { LOG.error("Failed to upload graph \"" + graph.getName() + "\", skipping.", ex); continue; } LOG.info(String.format("=======End of Upload Graph %s =======", graph.getName())); LOG.info(""); // Execute all benchmarks for this graph for (Benchmark benchmark : benchmarkSuite.getBenchmarksForGraph(graph)) { // Ensure that the output directory exists, if needed if (benchmark.isOutputRequired()) { try { Files.createDirectories(Paths.get(benchmark.getOutputPath()).getParent()); } catch (IOException e) { LOG.error("Failed to create output directory \"" + Paths.get(benchmark.getOutputPath()).getParent() + "\", skipping.", e); continue; } } String benchmarkText = String.format("%s:\"%s on %s\"", benchmark.getId(), benchmark.getAlgorithm().getAcronym(), graphSet.getName()); LOG.info(""); LOG.info(String.format("=======Start of Benchmark %s [%s/%s]=======", benchmark.getId(), finishedBenchmark + 1, numBenchmark)); // Execute the pre-benchmark steps of all plugins plugins.preBenchmark(benchmark); LOG.info(String.format("Benchmark %s started.", benchmarkText)); Process process = BenchmarkRunner.InitializeJvmProcess(platform.getName(), benchmark.getId()); BenchmarkRunnerInfo runnerInfo = new BenchmarkRunnerInfo(benchmark, process); ExecutorService.runnerInfos.put(benchmark.getId(), runnerInfo); // wait for runner to get started. long waitingStarted; LOG.info("Initializing benchmark runner..."); waitingStarted = System.currentTimeMillis(); while (!runnerInfo.isRegistered()) { if(System.currentTimeMillis() - waitingStarted > 10 * 1000) { LOG.error("There is no response from the benchmark runner. Benchmark run failed."); break; } else { TimeUtility.waitFor(1); } } LOG.info("The benchmark runner is initialized."); LOG.info("Running benchmark..."); LOG.info("Benchmark logs are stored at: \"" + benchmark.getLogPath() +"\"."); LOG.info("Waiting for completion... 
(Timeout after " + timeoutDuration + " seconds)"); waitingStarted = System.currentTimeMillis(); while (!runnerInfo.isCompleted()) { if(System.currentTimeMillis() - waitingStarted > timeoutDuration * 1000) { LOG.error("Timeout is reached. This benchmark run is skipped."); break; } else { TimeUtility.waitFor(1); } } BenchmarkRunner.TerminateJvmProcess(process); BenchmarkResult benchmarkResult = runnerInfo.getBenchmarkResult(); if(benchmarkResult != null) { benchmarkSuiteResultBuilder.withBenchmarkResult(benchmarkResult); long makespan = (benchmarkResult.getEndOfBenchmark().getTime() - benchmarkResult.getStartOfBenchmark().getTime()); LOG.info(String.format("Benchmark %s %s (completed: %s, validated: %s), which took: %s ms.", benchmark.getId(), benchmarkResult.isSuccessful() ? "succeed" : "failed", benchmarkResult.isCompleted(), benchmarkResult.isValidated(), makespan)); } else { benchmarkSuiteResultBuilder.withoutBenchmarkResult(benchmark); LOG.info(String.format("Benchmark %s %s (completed: %s, validated: %s).", benchmark.getId(), "failed", false, false)); } LOG.info(String.format("Benchmark %s ended.", benchmarkText)); // Execute the post-benchmark steps of all plugins LOG.info(String.format("Cleaning up %s.", benchmarkText)); platform.cleanup(benchmark); plugins.postBenchmark(benchmark, benchmarkResult); finishedBenchmark++; LOG.info(String.format("=======End of Benchmark %s [%s/%s]=======", benchmark.getId(), finishedBenchmark, numBenchmark)); LOG.info(""); } // Delete the graph platform.deleteGraph(graph.getName()); } } service.terminate(); long totalEndTime = System.currentTimeMillis(); long totalDuration = totalEndTime - totalStartTime; // Dump the used configuration NestedConfiguration benchmarkConfiguration = NestedConfiguration.empty(); try { Configuration configuration = new PropertiesConfiguration("benchmark.properties"); benchmarkConfiguration = NestedConfiguration.fromExternalConfiguration(configuration, "benchmark.properties"); } catch (ConfigurationException e) { // Already reported during loading of benchmark } // Construct the BenchmarkSuiteResult return benchmarkSuiteResultBuilder.buildFromConfiguration(SystemDetails.empty(), benchmarkConfiguration, platform.getPlatformConfiguration(), totalDuration); } public void setService(ExecutorService service) { this.service = service; } }
Update logging mechanism.
graphalytics-core/src/main/java/nl/tudelft/graphalytics/BenchmarkSuiteExecutor.java
Update logging mechanism.
<ide><path>graphalytics-core/src/main/java/nl/tudelft/graphalytics/BenchmarkSuiteExecutor.java
<ide> LOG.info("The benchmark runner is initialized.");
<ide>
<ide> LOG.info("Running benchmark...");
<del> LOG.info("Benchmark logs are stored at: \"" + benchmark.getLogPath() +"\".");
<add> LOG.info("Benchmark logs at: \"" + benchmark.getLogPath() +"\".");
<ide> LOG.info("Waiting for completion... (Timeout after " + timeoutDuration + " seconds)");
<ide> waitingStarted = System.currentTimeMillis();
<ide> while (!runnerInfo.isCompleted()) {
<ide> finishedBenchmark++;
<ide> LOG.info(String.format("=======End of Benchmark %s [%s/%s]=======", benchmark.getId(), finishedBenchmark, numBenchmark));
<ide> LOG.info("");
<add> LOG.info("");
<ide> }
<ide>
<ide> // Delete the graph
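The executor in this record polls the benchmark runner once per second, logs progress, and gives up once the configured timeout elapses. A stripped-down sketch of that poll-until-done-or-timeout pattern follows; the class and method names are hypothetical and do not belong to the Graphalytics API.

// Illustrative sketch only (hypothetical names, not the Graphalytics API):
// poll a completion flag once per second, log progress, and give up after a timeout.
import java.util.function.BooleanSupplier;

public class TimeoutPollSketch {

    /** Returns true if the condition became true before timeoutSeconds elapsed. */
    static boolean waitFor(BooleanSupplier condition, long timeoutSeconds) throws InterruptedException {
        long started = System.currentTimeMillis();
        while (!condition.getAsBoolean()) {
            if (System.currentTimeMillis() - started > timeoutSeconds * 1000) {
                return false; // timed out; the caller decides how to log and recover
            }
            Thread.sleep(1000); // poll once per second
        }
        return true;
    }

    public static void main(String[] args) throws InterruptedException {
        long deadline = System.currentTimeMillis() + 3000;
        System.out.println("Waiting for completion... (timeout after 10 seconds)");
        boolean done = waitFor(() -> System.currentTimeMillis() >= deadline, 10);
        System.out.println("completed in time: " + done);
    }
}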
JavaScript
mit
7bf69ace57ba82dbd0d4c32308f07f69a19781da
0
react-circuit/ultra,gt3/ultra-router
function noop() {} function id(x) { return x } function isFn(t) { return typeof t === 'function' ? t : void 0 } const strProto = Object.getPrototypeOf('') function isStr(s) { return Object.getPrototypeOf(Object(s)) === strProto } function empty(t) { return !t || (!t.length && !Object.keys(t).length) } export { id, isFn, isStr, empty } function makeArray(arr) { return Array.isArray(arr) ? arr : empty(arr) ? [] : [arr] } function pipe(...fns) { function invoke(v) { return fns.reduce((acc, fn) => (fn ? fn.call(this, acc) : acc), v) } return fns.length > 0 ? invoke : id } const flattenToObj = (arr, base = {}) => Object.assign(base, ...arr) function exclude(t, ...keys) { return flattenToObj(Object.keys(t).filter(k => keys.indexOf(k) === -1).map(k => ({ [k]: t[k] }))) } function substitute(literals, values, removeEmpty) { let vals = Array.from(values, v => v || '') let lits = Array.from(literals, v => v || '') if (removeEmpty && lits.length > vals.length) { lits = [lits[0], ...lits.slice(1).map((l, i) => l || (vals[i] = ''))] } return String.raw({ raw: lits }, ...vals) } function escapeRx(string) { return string.replace(/[.*+?^${}()|[\]\\]/g, '\\$&') } function warnOn(truthy, msg) { return truthy && console.error(msg) } let $devWarnOn = noop if (process.env.NODE_ENV !== 'production') { $devWarnOn = warnOn } export { makeArray, pipe, flattenToObj, exclude, substitute, escapeRx, $devWarnOn } export class Timer { static isTimer(timer) { return timer && timer instanceof Timer ? timer : false } constructor(cb, ms = 0, autoRun = true) { this.run = this.run.bind(this, cb, ms) if (autoRun) this.run() } get active() { return !!this.ref } run(cb, ms) { return (this.ref = setTimeout(this.stop.bind(this, cb), ms)) } stop(cb) { clearTimeout(this.ref) this.ref = undefined return cb && cb() } }
src/router/utils.js
function noop() {} function isFn(t) { return typeof t === 'function' ? t : void 0 } const strProto = Object.getPrototypeOf('') function isStr(s) { return Object.getPrototypeOf(Object(s)) === strProto } function empty(t) { return !t || (!t.length && !Object.keys(t).length) } export { isFn, isStr, empty } function makeArray(arr) { return Array.isArray(arr) ? arr : empty(arr) ? [] : [arr] } function pipe(...fns) { function invoke(v) { return fns.reduce((acc, fn) => (fn ? fn.call(this, acc) : acc), v) } return invoke } const flattenToObj = (arr, base = {}) => Object.assign(base, ...arr) function exclude(t, ...keys) { return flattenToObj(Object.keys(t).filter(k => keys.indexOf(k) === -1).map(k => ({ [k]: t[k] }))) } function substitute(literals, values, removeEmpty) { let vals = Array.from(values, v => v || '') let lits = Array.from(literals, v => v || '') if (removeEmpty && lits.length > vals.length) { lits = [lits[0], ...lits.slice(1).map((l, i) => l || (vals[i] = ''))] } return String.raw({ raw: lits }, ...vals) } function escapeRx(string) { return string.replace(/[.*+?^${}()|[\]\\]/g, '\\$&') } function warnOn(truthy, msg) { return truthy && console.error(msg) } let $devWarnOn = noop if (process.env.NODE_ENV !== 'production') { $devWarnOn = warnOn } export { makeArray, pipe, flattenToObj, exclude, substitute, escapeRx, $devWarnOn } export class Timer { static isTimer(timer) { return timer && timer instanceof Timer ? timer : false } constructor(cb, ms = 0, autoRun = true) { this.run = this.run.bind(this, cb, ms) if (autoRun) this.run() } get active() { return !!this.ref } run(cb, ms) { return (this.ref = setTimeout(this.stop.bind(this, cb), ms)) } stop(cb) { clearTimeout(this.ref) this.ref = undefined return cb && cb() } }
pipe should return identity when no fn is provided
src/router/utils.js
pipe should return identity when no fn is provided
<ide><path>src/router/utils.js
<ide> function noop() {}
<add>
<add>function id(x) { return x }
<ide>
<ide> function isFn(t) {
<ide> return typeof t === 'function' ? t : void 0
<ide> return !t || (!t.length && !Object.keys(t).length)
<ide> }
<ide>
<del>export { isFn, isStr, empty }
<add>export { id, isFn, isStr, empty }
<ide>
<ide> function makeArray(arr) {
<ide> return Array.isArray(arr) ? arr : empty(arr) ? [] : [arr]
<ide> function invoke(v) {
<ide> return fns.reduce((acc, fn) => (fn ? fn.call(this, acc) : acc), v)
<ide> }
<del> return invoke
<add> return fns.length > 0 ? invoke : id
<ide> }
<ide>
<ide> const flattenToObj = (arr, base = {}) => Object.assign(base, ...arr)
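The fix above makes pipe() fall back to the identity function when no functions are supplied, so composing an empty list becomes a no-op instead of a broken reducer call. The same rule expressed as a short Java sketch, shown only to illustrate the principle; the names are hypothetical.

// Illustrative sketch only (hypothetical names): the same "empty composition is the
// identity" rule as the JavaScript pipe() fix above, expressed in Java.
import java.util.List;
import java.util.function.Function;

public class PipeIdentitySketch {

    /** Composes the functions left to right; with no functions it returns the identity. */
    static <T> Function<T, T> pipe(List<Function<T, T>> fns) {
        return fns.stream().reduce(Function.identity(), Function::andThen);
    }

    public static void main(String[] args) {
        List<Function<Integer, Integer>> none = List.of();
        List<Function<Integer, Integer>> steps = List.of(x -> x + 1, x -> x * 2);

        System.out.println(pipe(none).apply(42)); // 42: nothing to compose, value passes through
        System.out.println(pipe(steps).apply(3)); // 8: (3 + 1) * 2
    }
}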
Java
apache-2.0
f427414adc620073a034aa0f2ea38545070b6747
0
FUNCATE/TerraMobile,TerraMobile/TerraMobile,TerraMobile/Java-OpenMobility,FUNCATE/TerraMobile,FUNCATE/Java-OpenMobility,opengeospatial/Java-OpenMobility,TerraMobile/TerraMobile
/* * GeoPackage.java * * Copyright 2013, Augmented Technologies Ltd. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.augtech.geoapi.geopackage; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.FileNotFoundException; import java.io.IOException; import java.io.RandomAccessFile; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.logging.Level; import java.util.logging.Logger; import org.opengis.feature.simple.SimpleFeature; import org.opengis.feature.simple.SimpleFeatureType; import org.opengis.feature.type.AttributeType; import org.opengis.feature.type.FeatureType; import org.opengis.feature.type.GeometryDescriptor; import org.opengis.feature.type.Name; import org.opengis.geometry.BoundingBox; import org.opengis.referencing.crs.CoordinateReferenceSystem; import com.augtech.geoapi.feature.SimpleFeatureImpl; import com.augtech.geoapi.geometry.BoundingBoxImpl; import com.augtech.geoapi.geopackage.geometry.GeometryDecoder; import com.augtech.geoapi.geopackage.geometry.OGCWKBWriter; import com.augtech.geoapi.geopackage.geometry.StandardGeometryDecoder; import com.augtech.geoapi.geopackage.table.FeatureField; import com.augtech.geoapi.geopackage.table.FeaturesTable; import com.augtech.geoapi.geopackage.table.FeaturesTable.GeometryInfo; import com.augtech.geoapi.geopackage.table.GpkgContents; import com.augtech.geoapi.geopackage.table.GpkgDataColumnConstraint; import com.augtech.geoapi.geopackage.table.GpkgDataColumnConstraint.DataColumnConstraint; import com.augtech.geoapi.geopackage.table.GpkgDataColumns; import com.augtech.geoapi.geopackage.table.GpkgExtensions; import com.augtech.geoapi.geopackage.table.GpkgExtensions.Extension; import com.augtech.geoapi.geopackage.table.GpkgGeometryColumns; import com.augtech.geoapi.geopackage.table.GpkgMetaData; import com.augtech.geoapi.geopackage.table.GpkgMetaDataReference; import com.augtech.geoapi.geopackage.table.GpkgSpatialRefSys; import com.augtech.geoapi.geopackage.table.GpkgTileMatrix; import com.augtech.geoapi.geopackage.table.GpkgTileMatrixSet; import com.augtech.geoapi.geopackage.table.GpkgTriggers; import com.augtech.geoapi.geopackage.table.TilesTable; import com.augtech.geoapi.geopackage.table.TilesTable.TileMatrixInfo; import com.augtech.geoapi.geopackage.views.GpkgView; import com.augtech.geoapi.geopackage.views.STGeometryColumns; import com.augtech.geoapi.geopackage.views.STSpatialRefSys; import com.augtech.geoapi.geopackage.views.SpatialRefSys; import com.augtech.geoapi.referncing.CoordinateReferenceSystemImpl; import com.vividsolutions.jts.geom.Envelope; import com.vividsolutions.jts.geom.Geometry; import com.vividsolutions.jts.io.ByteOrderValues; import com.vividsolutions.jts.simplify.DouglasPeuckerSimplifier; public class GeoPackage { protected ISQLDatabase sqlDB = null; protected File dbFile = null; public enum JavaType { INTEGER, STRING, BOOLEAN, FLOAT, DOUBLE, BYTE_ARR, UNKNOWN } /** A map of possible SQL Field types 
to {@link JavaType} enum values. * Field type names are all lowercase */ public Map<String, JavaType> sqlTypeMap = new HashMap<String, JavaType>(); public Logger log = Logger.getAnonymousLogger(); private Map<String, GpkgTable> sysTables = new HashMap<String, GpkgTable>(); private Map<String, GpkgView> sysViews = new HashMap<String, GpkgView>(); private Map<String, GpkgTable> userTables = new HashMap<String, GpkgTable>(); /** The name to create (if required) and test for use as a FeatureID within the GeoPackage */ public static String FEATURE_ID_FIELD_NAME = "feature_id"; /** For each new FeaturesTable, create an R*Tree index if the SQLite library supports it? * Default is True. If the library does not support R*Tree ({@link ISQLDatabase#hasRTreeEnabled()} * then indexes cannot be created. */ public static final boolean CREATE_RTREE_FOR_FEATURES = true; /** The OGC GeoPackage specification these statements relate to */ public static final String SPEC_VERSION = "OGC 12-128r9 - 0.9.7 - v8"; /** The maximum currently supported GeoPacakge version */ public static final int MAX_GPKG_VERSION = 0; /** The Sqlite registered application_id for a GeoPackage */ public static final int GPKG_APPLICATION_ID = Integer.decode("0x47503130"); /** The maximum number of vertices permissible on a single Geometry. * -1 = no limit */ protected int MAX_VERTEX_LIMIT = -1; /** If True, reading of GeoPackage headers, pragmas and Geometry encodings will * be validated against the specification and exceptions thrown if not valid. * If False, checks will be performed, but exceptions won't be thrown unless * data cannot be understood. Typical examples are the application_id pragma and Geometry.*/ public static boolean MODE_STRICT = true; /** The Geometry version to write in to the Geometry columns. Default is 0 * for Version 1.0 */ public static int GPKG_GEOM_HEADER_VERSION = 0; /** If {@code True} insert StandardGeoPackageBinary geometries into the GeoPackage. * If {@code False} then the Geometry header is set to ExtendedGeoPackageBinary * (which this implementation does not yet implement - clause 3.1.2, Annex K of spec). * Default is {@code True} */ public static boolean GPKG_GEOMETRY_STANDARD = true; /** Encode new Geometry in Little Endian order? Default is {@code False} */ public static boolean GPKG_GEOMETRY_LITTLE_ENDIAN = false; public static final int Z_M_VALUES_PROHIBIT = 0; public static final int Z_M_VALUES_MANDATORY = 1; public static final int Z_M_VALUES_OPTIONAL = 2; /** An array of extensions applicable to this GeoPackage */ protected Extension[] gpkgExtensions = null; /** The maximum number of records to fetch in one go through the cursor. Default is 1000. * Increasing this number <i>may</i> result in slightly faster queries on large recordsets, * but <i>could</i> also result in memory exceptions or missing records (especially on mobile * devices with limited memory. (Tested on Android at 1000) */ public static int MAX_RECORDS_PER_CURSOR = 1000; /** Connect to, or create a new GeoPackage with the supplied name and version.<p> * If the supplied name already exists then the database is checked to see if it * is a valid GeoPackage. If the supplied file does not exist, a new empty GeoPackage * is created with the supplied name. * * @param fileName The name of the GeoPackage to create or connect to. The .gpkg extension is added * if not supplied. * @param overwrite Overwrite the existing GeoPackage? * @throws Exception If an existing GeoPackage fails the validity check. 
* @see #isGPKGValid() */ public GeoPackage(ISQLDatabase sqlDB, boolean overwrite) { if (!sqlDB.getDatabaseFile().toString().endsWith(".gpkg")) throw new IllegalArgumentException("Invalid file extension for database - Must be .gpkg"); this.sqlDB = sqlDB; this.dbFile = sqlDB.getDatabaseFile(); if (overwrite) { if (dbFile.exists() && !dbFile.delete()) throw new IllegalArgumentException("Unable to overwrite GeoPackage file"); } // Load table definitions sysTables.put(GpkgSpatialRefSys.TABLE_NAME, new GpkgSpatialRefSys()); sysTables.put(GpkgContents.TABLE_NAME, new GpkgContents() ); sysTables.put(GpkgDataColumnConstraint.TABLE_NAME, new GpkgDataColumnConstraint()); sysTables.put(GpkgDataColumns.TABLE_NAME, new GpkgDataColumns()); sysTables.put(GpkgExtensions.TABLE_NAME, new GpkgExtensions()); sysTables.put(GpkgGeometryColumns.TABLE_NAME, new GpkgGeometryColumns()); sysTables.put(GpkgMetaData.TABLE_NAME, new GpkgMetaData()); sysTables.put(GpkgMetaDataReference.TABLE_NAME, new GpkgMetaDataReference()); sysTables.put(GpkgTileMatrix.TABLE_NAME, new GpkgTileMatrix()); sysTables.put(GpkgTileMatrixSet.TABLE_NAME, new GpkgTileMatrixSet()); sysViews.put(SpatialRefSys.VIEW_NAME, new SpatialRefSys()); sysViews.put(STGeometryColumns.VIEW_NAME, new STGeometryColumns()); sysViews.put(STSpatialRefSys.VIEW_NAME, new STSpatialRefSys()); //sysViews.put(GeometryColumns.VIEW_NAME, new GeometryColumns()); // Requires function definition // Look-ups for sql to Java sqlTypeMap.put("int", JavaType.INTEGER); sqlTypeMap.put("integer", JavaType.INTEGER); sqlTypeMap.put("tinyint", JavaType.INTEGER); sqlTypeMap.put("text", JavaType.STRING); sqlTypeMap.put("date", JavaType.STRING); sqlTypeMap.put("datetime", JavaType.STRING); sqlTypeMap.put("string", JavaType.STRING); sqlTypeMap.put("boolean", JavaType.BOOLEAN); sqlTypeMap.put("float", JavaType.FLOAT); sqlTypeMap.put("double", JavaType.DOUBLE); sqlTypeMap.put("real", JavaType.DOUBLE); sqlTypeMap.put("long", JavaType.DOUBLE); sqlTypeMap.put("geometry", JavaType.BYTE_ARR); sqlTypeMap.put("blob", JavaType.BYTE_ARR); sqlTypeMap.put("none", JavaType.BYTE_ARR); /* If the file alread exists, check it is a valid geopackage */ if (dbFile.exists()) { if (!isGPKGValid(false)) throw new IllegalArgumentException("GeoPackage "+dbFile.getName()+" failed integrity checks - Check the source."); } else { log.log(Level.INFO, "Database file does not exist. Creating new GeoPackage "+dbFile.getName()); // Create the DB file this.sqlDB.createDatabase(); for (GpkgTable tab : sysTables.values()) tab.create(this); for (GpkgView view : sysViews.values()) view.create(this); // Our standard triggers for (String stmt : GpkgTriggers.ALL_STANDARD_TRIGGERS) sqlDB.execSQL( stmt ); for (String stmt : GpkgSpatialRefSys.INSERT_DEFAULT_SPATIAL_REF_SYS) sqlDB.execSQL( stmt ); // Try setting the application_id pragma through Sqlite implementation if ( !setGpkgAppPragma() ) setGpkgAppHeader(); if (!isGPKGValid(true)) throw new IllegalArgumentException("GeoPackage "+dbFile.getName()+" failed integrity checks - Check the source."); } log.log(Level.INFO, "Connected to GeoPackage "+dbFile.getName()); } /** Get the name of the database file associated with this GeoPackage * * @return */ public String getDatabaseFileName() { return this.dbFile.toString(); } /** Close the underlying SQLite DB instance associated with this GeoPackge * */ public void close() { this.sqlDB.close(); } /** Check for the {@link #GPKG_APPLICATION_ID} in the database Pragma application_id * field. 
* * @return True if its set */ private boolean isGpkgAppPragmaSet() { boolean isGPKG = false; ICursor c = sqlDB.doRawQuery("pragma application_id"); if (c.moveToFirst()) { isGPKG = c.getInt(0)==GPKG_APPLICATION_ID; } c.close(); return isGPKG; } /** Set the GeoPackage application ID pragma. * * @return True if set successfully. */ private boolean setGpkgAppPragma() { if (!sqlDB.isOpen()) sqlDB.getDatabase(true); sqlDB.doRawQuery("pragma application_id="+GPKG_APPLICATION_ID); return isGpkgAppPragmaSet(); } /** Manually test whether the SQLite header contains the {@link #GPKG_APPLICATION_ID} * This is used as no current version of Android supports a version of Sqlite that supports the * pragma 'application_id', therefore we write to the header manually. * * @return True if its set. */ private boolean isGpkgAppHeaderSet() { if (sqlDB.isOpen()) sqlDB.close(); boolean isSet = false; try { RandomAccessFile raf = new RandomAccessFile(dbFile, "r"); raf.seek(68); int n68 = raf.readInt(); isSet = n68==GPKG_APPLICATION_ID; raf.close(); } catch (FileNotFoundException e) { e.printStackTrace(); } catch (IOException e) { e.printStackTrace(); } return isSet; } /** Manually set the SQLite file header to include the {@link #GPKG_APPLICATION_ID}. * This is used as no current version of Android supports a version of Sqlite that supports the * pragma 'application_id', therefore we write to the header manually. * * @return True if set, false if there was an error. */ private boolean setGpkgAppHeader() { if (sqlDB.isOpen()) sqlDB.close(); /* */ try { RandomAccessFile raf = new RandomAccessFile(dbFile, "rw"); raf.seek(68); raf.writeInt( GPKG_APPLICATION_ID ); raf.close(); } catch (Exception e) { e.printStackTrace(); return false; } return true; } /** Check that the GeoPackage is valid according to tests outlined in the specification, * namely that the application_id is correct, a database integrity returns 'ok' and there * are no foreign key issues.<p> * This check is performed automatically when connecting to a GeoPackage, but should * be performed before passing a GeoPackage to another client application or service. * * @param doIntegrity True to perform a PRAGMA integrity_check. This can take a long * time on large files (>250mb), therefore it is only normally run * when a GeoPackage is created through this library. * * @return True if the checks pass. */ public boolean isGPKGValid(boolean doIntegrity) { boolean isGPKG = false; boolean integrity = false; boolean foreignKey = false; isGPKG = isGpkgAppPragmaSet(); if ( !isGPKG && MODE_STRICT ) isGPKG = isGpkgAppHeaderSet(); sqlDB.getDatabase(false); ICursor c = null; if (doIntegrity) { c = sqlDB.doRawQuery("PRAGMA integrity_check"); if (c.moveToFirst()) { integrity = c.getString(0).equals("ok"); } c.close(); } else { integrity = true; } c = sqlDB.doRawQuery("PRAGMA foreign_key_check"); foreignKey = c.moveToFirst(); c.close(); // Check all system tables are in the database boolean tabsExist = true; for (GpkgTable gt : sysTables.values()) { if (!gt.isTableInDB(this)) { tabsExist = false; continue; } } return (isGPKG || MODE_STRICT==false) && integrity && !foreignKey && tabsExist; } /** Get the database associated with this GeoPackage * * @return */ public ISQLDatabase getDatabase() { return this.sqlDB; } /** Get all tiles in the table, at the specified zoom, in order to cover the supplied * bounding box. * * @param tableName The table to query * @param bbox The extents of the area to cover. 
* @param zoomLevel What tile level, or zoom, should the query get * @return * @throws Exception */ public List<SimpleFeature> getTiles(String tableName, BoundingBox bbox, int zoomLevel) throws Exception { log.log(Level.INFO, "BBOX query for images in "+tableName); List<SimpleFeature> allFeats = new ArrayList<SimpleFeature>(); GpkgTable tilesTable = getUserTable( tableName, GpkgTable.TABLE_TYPE_TILES ); // IF strict, check for primary key, although not essential for this query if (MODE_STRICT) { if (tilesTable.getPrimaryKey(this).equals("unknown")) throw new Exception("Primary key not defined on table "+tableName ); } // Is BBOX valid against the table or tile_matrix_set? if ( !checkBBOXAgainstLast(tilesTable, bbox, false, false)) return allFeats; // Tile matrix data for this table GpkgRecords tmRecs = getSystemTable(GpkgTileMatrix.TABLE_NAME).query( this, "table_name='"+tableName+"' AND zoom_level="+zoomLevel); if (tmRecs.getFieldInt(0, "zoom_level")!=zoomLevel) throw new Exception("Zoom level "+zoomLevel+" is not defined for this tile pyramid"); int tmWidth = tmRecs.getFieldInt(0, "tile_width"); int tmHeight = tmRecs.getFieldInt(0, "tile_height"); double pixX = tmRecs.getFieldDouble(0, "pixel_x_size"); double pixY = tmRecs.getFieldDouble(0, "pixel_y_size"); // Construct a temporary matrix_set bbox (for convenience) GpkgRecords tms = getSystemTable(GpkgTileMatrixSet.TABLE_NAME).query(this, "table_name='"+tilesTable.tableName+"'"); BoundingBox tmsBox = new BoundingBoxImpl( tms.getFieldDouble(0, "min_x"), tms.getFieldDouble(0, "max_x"), tms.getFieldDouble(0, "min_y"), tms.getFieldDouble(0, "max_y")); /* TODO Get all tiles in the table at the specified zoom and check the bounds?, * or something else... */ /* Calculate the min and max rows and columns. * This mechanism works for 3857 (slippy tiles) but serious doubt it does for * anything else, therefore have to test with other projections and create a generic * mechanism for creating a where clause from a bounding box */ int minX = (int) Math.round( (bbox.getMinX() - tmsBox.getMinX() ) / (tmWidth * pixX) ); int maxX = (int) Math.round( (bbox.getMaxX() - tmsBox.getMinX() ) / (tmWidth * pixX) ); int minY = (int) Math.round( (tmsBox.getMaxY() - bbox.getMaxY() ) / (tmHeight * pixY) ); int maxY = (int) Math.round( (tmsBox.getMaxY() - bbox.getMinY() ) / (tmHeight * pixY) ); String strWhere = String.format( "zoom_level=%s AND tile_column >= %s AND tile_column <= %s AND tile_row >=%s AND tile_row <=%s", zoomLevel, minX, maxX, minY, maxY); return getTiles(tableName, strWhere); } /** Query the GeoPackage for one or more tiles based on a where clause. * The SimpleFeature's that are returned have a {@linkplain FeatureType} name * matching the tableName and a {@link GeometryDescriptor} mathing that defined * in gpkg_contents for the table.<p> * The feature id (accessible via {@link SimpleFeature#getID()}) is the of the form * <code>TableName-RecordID-zoom-row_ref-col_ref (or tableName-id-zoom-x-y) </code><p> * The image data is stored as a byte[] on an attribute named 'the_image' and the bounds * of the tile are stored as a {@link BoundingBox} on an attribute named 'the_geom'. 
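 * <p>Illustrative sketch only; the table name and where clause are assumptions and
 * {@code gpkg} is taken to be an already connected GeoPackage instance.
 * <pre>{@code
 * List<SimpleFeature> tiles = gpkg.getTiles("osm_tiles", "zoom_level=12 AND tile_row=1347");
 * for (SimpleFeature tile : tiles) {
 *     byte[] image = (byte[]) tile.getAttribute("the_image");           // raw PNG/JPEG bytes
 *     BoundingBox bounds = (BoundingBox) tile.getAttribute("the_geom"); // tile extent
 * }
 * }</pre>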
* * @param tableName The {@linkplain TilesTable#getTableName()} to query * @param whereClause The SQL where clause, excluding the word 'where' * @return A List of {@linkplain SimpleFeature}'s * @throws Exception */ public List<SimpleFeature> getTiles(String tableName, String whereClause) throws Exception { log.log(Level.INFO, "WHERE query for images in "+tableName); List<SimpleFeature> allFeats = new ArrayList<SimpleFeature>(); TilesTable tilesTable = (TilesTable)getUserTable( tableName, GpkgTable.TABLE_TYPE_TILES ); // IF strict, check for primary key, although not essential for this query if (MODE_STRICT) { if (tilesTable.getPrimaryKey(this).equals("unknown")) throw new Exception("Primary key not defined on table "+tableName ); } // Get the records matching our query GpkgRecords featRecords = tilesTable.query(this, whereClause); if (featRecords.size()==0) return allFeats; // Construct the feature type SimpleFeatureType featureType = tilesTable.getSchema(); List<Object> attrValues = null; TileMatrixInfo tmi = ((TilesTable)tilesTable).getTileMatrixInfo(); // Now go through each record building the feature with it's attribute values for (int rIdx=0; rIdx < featRecords.size(); rIdx++) { // Create new list so previous values are not over-written attrValues = new ArrayList<Object>(); attrValues.add( featRecords.getFieldBlob(rIdx, "tile_data") ); // Construct bounding box for tile BoundingBox bbox = tmi.getTileBounds( featRecords.getFieldInt(rIdx, "tile_column"), featRecords.getFieldInt(rIdx, "tile_row"), featRecords.getFieldInt(rIdx, "zoom_level") ); attrValues.add( bbox ); // Tile details attrValues.add( featRecords.getFieldInt(rIdx, "tile_column") ); attrValues.add( featRecords.getFieldInt(rIdx, "tile_row") ); attrValues.add( featRecords.getFieldInt(rIdx, "zoom_level") ); // The ID for this tile String fid = String.format("%s-%s-%s-%s-%s", tableName, featRecords.getFieldInt(rIdx, "id"), featRecords.getFieldInt(rIdx, "tile_column"), featRecords.getFieldInt(rIdx, "tile_row"), featRecords.getFieldInt(rIdx, "zoom_level") ); // Create the feature and add to list of all features allFeats.add( new SimpleFeatureImpl(fid, attrValues, featureType ) ); } return allFeats; } /** Check if this feature is in the GeoPackage.<p> * The query is based on {@link SimpleFeatureType#getTypeName()} = tableName and * {@link SimpleFeature#getID()} = Table.featureFieldName * * @param simpleFeature The feature to test. * @return True if found */ public boolean isFeatureInGeoPackage(SimpleFeature simpleFeature) { String tableName = simpleFeature.getType().getTypeName(); FeaturesTable featTable = (FeaturesTable)getUserTable( tableName, GpkgTable.TABLE_TYPE_FEATURES ); return featTable.isFeatureInTable(simpleFeature); } /** Get a list of all SimpleFeature's within, or intersecting with, the supplied BoundingBox.<p> * This version always performs an intersection test and does not check the bbox is within or * intersecting with the table extents. A StandardGeometryDecoder is used for reading feature * data. * * @param tableName The <i>case sensitive</i> table name in this GeoPackage to query. * @param bbox The {@link BoundingBox} to find features in, or intersecting with. * @return A list of {@linkplain SimpleFeature}'s * @throws Exception If the SRS of the supplied {@link BoundingBox} does not match the SRS of * the table being queried. 
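 * <p>A hedged sketch; the table name, extents and SRS id are assumptions, and the query
 * box must carry the same SRS as the table being queried.
 * <pre>{@code
 * CoordinateReferenceSystem crs = new CoordinateReferenceSystemImpl("4326");
 * BoundingBox queryBox = new BoundingBoxImpl(-2.0, -1.0, 51.0, 52.0, crs); // minX, maxX, minY, maxY
 * List<SimpleFeature> found = gpkg.getFeatures("water_pipes", queryBox);
 * }</pre>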
*/ public List<SimpleFeature> getFeatures(String tableName, BoundingBox bbox) throws Exception { return getFeatures(tableName, bbox, true, true, new StandardGeometryDecoder() ); } /** Get a list of {@link SimpleFeature} from the GeoPackage by specifying a where clause * (for example {@code featureId='pipe.1234'} or {@code id=1234} ) * * @param tableName The <i>case sensitive</i> table name that holds the feature (probably * the localName of {@link SimpleFeatureType#getName()} * @param whereClause The 'Where' clause, less the where. Passing Null will return * all records from the table, which is discouraged. * @param geomDecoder The type of {@linkplain GeometryDecoder} to use. * @return A list of SimpleFeature's or an empty list if none were found in the specified table * matching the the filter * * @throws Exception */ public List<SimpleFeature> getFeatures(String tableName, String whereClause, GeometryDecoder geomDecoder) throws Exception { FeaturesTable featTable = (FeaturesTable)getUserTable( tableName, GpkgTable.TABLE_TYPE_FEATURES ); String stmt = "SELECT * FROM ["+tableName+"]"; if (whereClause!=null && !whereClause.equals("")) stmt+=" WHERE "+whereClause; return getFeatures(stmt, featTable, geomDecoder); } /** Get a list of all SimpleFeature's within, or intersecting with, the supplied BoundingBox. * * @param tableName The <i>case sensitive</i> table name in this GeoPackage to query. * @param bbox The {@link BoundingBox} to find features in, or intersecting with. * @param includeIntersect Should feature's intersecting with the supplied box be returned? * @param testExtents Should the bbox be tested against the data extents in gpkg_contents before * issuing the query? If <code>False</code> a short test on the extents is performed. (In case table * extents are null) * @param geomDecoder The {@link GeometryDecoder} to use for reading feature geometries. * @return A list of {@linkplain SimpleFeature}'s * @throws Exception If the SRS of the supplied {@link BoundingBox} does not match the SRS of * the table being queried. */ public List<SimpleFeature> getFeatures(String tableName, BoundingBox bbox, boolean includeIntersect, boolean testExtents, GeometryDecoder geomDecoder) throws Exception { log.log(Level.INFO, "BBOX query for features in "+tableName); List<SimpleFeature> allFeats = new ArrayList<SimpleFeature>(); FeaturesTable featTable = (FeaturesTable)getUserTable( tableName, GpkgTable.TABLE_TYPE_FEATURES ); // Is BBOX valid against the table? 
if ( !checkBBOXAgainstLast(featTable, bbox, includeIntersect, testExtents)) return allFeats; GeometryInfo gi = featTable.getGeometryInfo(); StringBuffer sqlStmt = new StringBuffer(); String pk = featTable.getPrimaryKey(this); if (MODE_STRICT) { if (pk.equals("rowid")) throw new Exception("Primary key not defined on table "+featTable.getTableName() ); } // If this GeoPackage is RTREE enabled, use the spatial index if (sqlDB.hasRTreeEnabled() && gi.hasSpatialIndex()) { String idxTable = "[rtree_"+tableName+"_"+gi.getColumnName()+"]"; sqlStmt.append("SELECT [").append(tableName).append("].* FROM [").append(tableName).append("], "); sqlStmt.append(idxTable).append(" WHERE ["); sqlStmt.append(tableName).append("].").append(pk).append("="); sqlStmt.append(idxTable).append(".id"); sqlStmt.append(" AND MinX>=").append( bbox.getMinX() ); sqlStmt.append(" AND MaxX<=").append( bbox.getMaxX() ); sqlStmt.append(" AND MinY>=").append( bbox.getMinY() ); sqlStmt.append(" AND MaxY<=").append( bbox.getMaxY() ); return getFeatures(sqlStmt.toString(), featTable, geomDecoder); } /* Query all records in the feature table and check the header envelope * for matchin/ intersecting bounds. If the envelope is null, then the full * geometry is read and checked */ sqlStmt.append("SELECT * FROM [").append(tableName).append("] WHERE id IN("); // Query only for feature geometry and test that before getting all attributes long startTime = System.currentTimeMillis(); int totalRecs = featTable.getCount(this); int lastPK = 0, recCount = 0, hitCount = 0; boolean hit = false; Envelope headerEnv = null; Envelope query = new Envelope(bbox.getMinX(), bbox.getMaxX(), bbox.getMinY(), bbox.getMaxY()); /* Deprecated getCount() on Cursor to save the cursor iterating * whole ResultSet on underlying Cursor implementation */ // While we have less records than total for table.. while (recCount < totalRecs) { String sql = String.format("SELECT %s,%s FROM [%s] WHERE %s > %s ORDER BY %s LIMIT %s", pk, gi.getColumnName(), tableName, pk, lastPK, pk, MAX_RECORDS_PER_CURSOR); ICursor cPage = getDatabase().doRawQuery( sql ); // Go through these x number of records boolean hasRecords = false; while (cPage.moveToNext()) { hasRecords = true; // Decode the geometry and test headerEnv = geomDecoder.setGeometryData( cPage.getBlob(1) ).getEnvelope(); // No bbox from header, so decode the whole geometry (a lot slower) if (headerEnv.isNull() && !geomDecoder.isEmptyGeom()) { headerEnv = geomDecoder.getGeometry().getEnvelopeInternal(); } // Test bounds hit = (includeIntersect ? query.intersects( headerEnv ) : false) || query.contains( headerEnv ) || headerEnv.contains( query ); if (hit) { sqlStmt.append(cPage.getInt(0)).append(","); hitCount++; } // Store the last key we saw for the next page query lastPK = cPage.getInt(0); recCount++; } cPage.close(); if (hasRecords==false) break; } log.log(Level.INFO, recCount+" geometries checked in "+(System.currentTimeMillis()-startTime)/1000+" seconds"); // Didn't find anything if (hitCount==0) return allFeats; sqlStmt.setLength(sqlStmt.length()-1);// How many id's can the DB handle?? sqlStmt.append(");"); return getFeatures(sqlStmt.toString(), featTable, geomDecoder ); } /** Get a list of {@link SimpleFeature} from the GeoPackage by specifying a full SQL statement. * * @param sqlStatement * @param featTable * @param geomDecoder The type of {@linkplain GeometryDecoder} to use. 
* @return A list of SimpleFeature's or an empty list if none were found in the specified table * matching the the filter * @throws Exception */ protected List<SimpleFeature> getFeatures(String sqlStatement, FeaturesTable featTable, GeometryDecoder geomDecoder) throws Exception { List<SimpleFeature> allFeats = new ArrayList<SimpleFeature>(); int totalRecs = featTable.getCount(this); if (totalRecs==0) return allFeats; SimpleFeatureType featureType = featTable.getSchema(); List<AttributeType> attrTypes = featureType.getTypes(); GeometryInfo geomInfo = featTable.getGeometryInfo(); // Find the feature id field String featureFieldName = ""; for (GpkgField gf : featTable.getFields() ) { if ( ((FeatureField)gf).isFeatureID() ) { featureFieldName = gf.getFieldName(); break; } } /* Query the table in 'pages' of LIMIT number */ long startTime = System.currentTimeMillis(); String pk = featTable.getPrimaryKey(this); if (MODE_STRICT) { if (pk.equals("rowid")) throw new Exception("Primary key not defined on table "+featTable.getTableName() ); } int lastPK = 0, recCount = 0; sqlStatement = sqlStatement.endsWith(";") ? sqlStatement.substring(0, sqlStatement.length()-1) : sqlStatement; int whereIdx = sqlStatement.toLowerCase().indexOf("where"); sqlStatement = whereIdx>0 ? sqlStatement+" AND " : sqlStatement+" WHERE "; ArrayList<Object> attrValues = new ArrayList<Object>(); Object value = null; String fid; GpkgRecords featRecords = null; String sql = ""; String fieldName = null; // While we have less records than total for table.. while (recCount < totalRecs) { sql = String.format(sqlStatement+"%s > %s ORDER BY %s LIMIT %s", pk, lastPK, pk, MAX_RECORDS_PER_CURSOR); featRecords = featTable.rawQuery(this, sql ); if (featRecords.size()==0) break; // Now go through each record building the feature with it's attribute values for (int rIdx=0; rIdx < featRecords.size(); rIdx++) { // Create new list so previous values are not overridden attrValues = new ArrayList<Object>(); fid = null; /* For each type definition, get the value, ensuring the * correct order is maintained on the value list*/ for (int typeIdx=0; typeIdx < attrTypes.size(); typeIdx++) { fieldName = attrTypes.get( typeIdx ).getName().getLocalPart(); value = featRecords.get(rIdx).get( featRecords.getFieldIdx(fieldName) ); // If defined as the feature's ID, store for feature creation if ( fieldName.equals(featureFieldName) ) { fid = String.valueOf( value ); continue; // Add as ID, not an attribute } else if (fieldName.equals(geomInfo.getColumnName())) { // If geometry column, decode to actual Geometry value = geomDecoder.setGeometryData( (byte[])value ).getGeometry(); } attrValues.add(value); } attrValues.trimToSize(); // Get or create a feature id? 
if (fid==null || fid.equals("null")) fid = featTable.getTableName()+"."+recCount; // Create the feature and add to list of all features allFeats.add( new SimpleFeatureImpl(fid, attrValues, featureType ) ); // Store the last key we saw for the next page query lastPK = featRecords.getFieldInt(rIdx, pk ); recCount++; } } featRecords = null; geomDecoder.clear(); log.log(Level.INFO, recCount+" features built in "+(System.currentTimeMillis()-startTime)/1000+" secs"); return allFeats; } /** Convenience method to check the passed bounding box (for a query) CRS matches * that on the {@link #lastFeatTable} and the bbox is within/ intersects with the * table boundingbox * * @param checkTable The table to check the query box against * @param queryBBox The query Bounding box * @param includeIntersect If vector/ feature data, should we test for intersection as * well as contains? * @param shortTest If True only the CRS's are tested to make sure they match. If False, the * table and/ or tile matrix set extents are tested as well. * @return True if checks pass */ private boolean checkBBOXAgainstLast(GpkgTable checkTable, BoundingBox queryBBox, boolean includeIntersect, boolean shortTest) { // Check the SRS's are the same (Projection beyond scope of implementation) BoundingBox tableBbox = checkTable.getBounds(); String qCode = queryBBox.getCoordinateReferenceSystem().getName().getCode(); String qCodeS = queryBBox.getCoordinateReferenceSystem().getName().getCodeSpace(); String tCode = tableBbox.getCoordinateReferenceSystem().getName().getCode(); String tCodeS = tableBbox.getCoordinateReferenceSystem().getName().getCodeSpace(); if (!qCode.equalsIgnoreCase(tCode) || !qCodeS.equalsIgnoreCase(tCodeS)) { log.log(Level.WARNING, "Passed bounding box SRS does not match table SRS"); return false; } if (shortTest) return true; /* If GpkgContents has null bounds for this table do full query, * otherwise test the table bounds */ boolean queryTable = false; if (!tableBbox.isEmpty()) { if (checkTable instanceof TilesTable) { // If tiles, bbox must be inside table extents queryTable = queryBBox.intersects( tableBbox ) || tableBbox.contains( queryBBox ); } else { // If features, inside or intersects queryTable = (includeIntersect ? queryBBox.intersects( tableBbox ) : false) || queryBBox.contains( tableBbox ) || tableBbox.contains(queryBBox); } } else { if (checkTable instanceof TilesTable) { // If a tiles table and no bounds in contents, check the tile_matrix_set definitions GpkgRecords tms = null; try { tms = getSystemTable(GpkgTileMatrixSet.TABLE_NAME).query(this, "table_name='"+checkTable.tableName+"'"); } catch (Exception e) { e.printStackTrace(); return false; } // Construct a bbox to test against CoordinateReferenceSystem crs = new CoordinateReferenceSystemImpl(""+tms.getFieldInt(0, "srs_id")); BoundingBox tmsBox = new BoundingBoxImpl( tms.getFieldDouble(0, "min_x"), tms.getFieldDouble(0, "max_x"), tms.getFieldDouble(0, "min_y"), tms.getFieldDouble(0, "max_y"), crs); queryTable = queryBBox.intersects( tmsBox ) || tmsBox.contains( queryBBox ); } } return queryTable; } /** Get a specific GeoPackage system table * * @param tableName * @return */ public GpkgTable getSystemTable(String tableName) { return sysTables.get(tableName); } /** Get one of the user defined tables by name. If the table has not * been loaded then it is created and cached. * * @param tableName The name of the table. 
* @param tableType Either {@link GpkgTable#TABLE_TYPE_FEATURES} || {@link GpkgTable#TABLE_TYPE_TILES} * @return An instance of the table. * @throws IllegalArgumentException if the table type is not one of the above, or the * table does not exist in the GeoPackage. */ public GpkgTable getUserTable(String tableName, String tableType) { GpkgTable gpkgTable = userTables.get(tableName); if (gpkgTable==null) { if (tableType.equals(GpkgTable.TABLE_TYPE_FEATURES) ) { gpkgTable = new FeaturesTable(this, tableName); } else if (tableType.equals(GpkgTable.TABLE_TYPE_TILES) ) { gpkgTable = new TilesTable(this, tableName); } else { throw new IllegalArgumentException("Incompatible user table type: "+tableType); } if (!gpkgTable.isTableInGpkg(this)) throw new IllegalArgumentException("Table "+tableName+" does not exist in the GeoPackage"); userTables.put(tableName, gpkgTable); } return gpkgTable; } /** Get a list of all user tables within the current GeoPackage.<p> * Note that the results of this query are not cached in the same way that system tables are and * the table data is not populated until a relevant method/ query (on the table) is * called. This allows for quicker/ lower cost checks on the number and/ or names of tables in * the GeoPackage. * * @param tableType Either {@link GpkgTable#TABLE_TYPE_FEATURES} or {@link GpkgTable#TABLE_TYPE_TILES} * @return A new list of tables or an empty list if none were found or the wrong tableType was specified. */ public List<GpkgTable> getUserTables(String tableType) { ArrayList<GpkgTable> ret = new ArrayList<GpkgTable>(); if (!tableType.equals(GpkgTable.TABLE_TYPE_FEATURES) && !tableType.equals(GpkgTable.TABLE_TYPE_TILES)) return ret; ICursor tables = null; try { tables = sysTables.get(GpkgContents.TABLE_NAME).query(this, new String[]{"table_name"}, "data_type='"+tableType+"'"); } catch (Exception e) { e.printStackTrace(); return ret; } GpkgTable tab = null; while(tables.moveToNext()) { if (tableType.equals(GpkgTable.TABLE_TYPE_FEATURES)) { tab = new FeaturesTable(this, tables.getString(0)); } else { tab = new TilesTable(this, tables.getString(0)); } ret.add(tab); } tables.close(); ret.trimToSize(); return ret; } /** Insert a collection of tiles in to the GeoPackage * * @param features * @return The number of tiles inserted * @throws Exception */ public int insertTiles(Collection<SimpleFeature> features) throws Exception { int numInserted = 0; long rec = -1; for (SimpleFeature sf : features) { rec = insertTile(sf); if( rec>-1 ) numInserted++; } return numInserted; } /** Insert a tile into the GeoPackage from a SimpleFeature.<p> * The tile reference is taken from the feature ID in the form of zoom/xRef/yRef * with or without leading information. The zoom/x/y should be the last three parts * of the ID, which can include a file extension.<p> * The first instance of a byte[] on the feature's attribute will be taken as the image * data. * * @param feature The {@link SimpleFeature} with details as above * @return The row id of the newly inserted record if successful * * @throws Exception If the table does not exist in the GeoPackage or the supplied * tile reference is not valid for the table or the attributes and/ or reference cannot * be decoded. 
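 * <p>For example (illustrative), a feature whose ID ends in {@code .../12/2023/1347.png}
 * is decoded as zoom 12, tile column 2023, tile row 1347, and the first {@code byte[]}
 * attribute found on the feature is used as the image data.
 * <pre>{@code
 * long rowId = gpkg.insertTile(tileFeature); // tileFeature is a hypothetical SimpleFeature built as above
 * }</pre>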
*/ public long insertTile(SimpleFeature feature) throws Exception { byte[] tileData = null; // Cycle feature attrs to get the image data (assumes first byte[] is image) for (int i=0; i<feature.getAttributeCount(); i++) { if (feature.getAttribute(i) instanceof byte[]) { tileData = (byte[]) feature.getAttribute(i); break; } } if (tileData==null) { throw new Exception("Could not find image data"); } //id=49/1/12/2023/1347.PNG.tile String[] idParts = feature.getID().split("/"); if (idParts.length<3) { throw new Exception("Could not decode tile reference from ID"); } int x=0, y=0, z=0; try { z = Integer.valueOf(idParts[idParts.length-3]); x = Integer.valueOf(idParts[idParts.length-2]); String sY = idParts[idParts.length-1]; y = Integer.valueOf(sY.substring(0, sY.indexOf("."))); } catch (Exception e) { throw new Exception("Could not decode tile reference from ID"); } return insertTile(feature.getType().getName().getLocalPart(), tileData, x, y, z); } /** Get a single tile by its zoom level column and row from this GeoPackage * * @param tableName The name of the table to query * @param x_col X reference (the column) * @param y_row Y reference (the row) * @param zoom The zoom level from the tile_matrix (generally between 0-18) * @return A byte[] or Null if no matching record is found * * @throws Exception */ public byte[] getTile(String tableName, int x_col, int y_row, int zoom) throws Exception { GpkgRecords recs = new TilesTable(this, tableName).query(this, String.format("zoom_level=%s AND tile_column=%s AND tile_row=%s", zoom, x_col, y_row) ); return recs.getFieldBlob(0, "tile_data"); } /** Insert a single raster tile into the GeoPackage * * @param tableName The tile table name * @param tile The tile image data * @param tileColumn The column ID (x) * @param tileRow The row ID (y) * @param zoom The zoom level for the tile * @return The row id of the newly inserted record if successful * * @throws Exception If the table does not exist in the GeoPackage or the supplied * tile reference is not valid for the table. 
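 * <p>Sketch only; the table name and tile reference are assumptions, and the image bytes
 * must be PNG or JPEG and fall inside the tile matrix defined for the zoom level.
 * <pre>{@code
 * byte[] png = java.nio.file.Files.readAllBytes(java.nio.file.Paths.get("1347.png"));
 * long rowId = gpkg.insertTile("osm_tiles", png, 2023, 1347, 12); // column, row, zoom
 * byte[] stored = gpkg.getTile("osm_tiles", 2023, 1347, 12);      // read it back
 * }</pre>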
*/ public long insertTile(String tableName, byte[] tile, int tileColumn, int tileRow, int zoom) throws Exception { TilesTable tilesTable = (TilesTable)getUserTable( tableName, GpkgTable.TABLE_TYPE_TILES ); // Is this data jpeg or png (only permissible types) String pngHdr = new String( new byte[]{tile[0], tile[1], tile[2], tile[3]} ); String jpgHdr = Integer.toHexString(tile[0] & 0xFF)+Integer.toHexString(tile[1] & 0xFF); if (!pngHdr.toLowerCase().contains("png") && !jpgHdr.equalsIgnoreCase("ffd8")) { throw new Exception("Tile image is neither PNG or JPG"); } // Check the tile reference is valid for the tile-matrix GpkgRecords matrix = getSystemTable(GpkgTileMatrix.TABLE_NAME).query(this, "table_name='"+tableName+"' AND zoom_level="+zoom); int w = matrix.getFieldInt(0, "matrix_width"); int h = matrix.getFieldInt(0, "matrix_height"); if (tileColumn > w || tileColumn < 1 || tileRow > h || tileRow < 1 || w==-1 || h==-1) { throw new Exception("Supplied tile reference is outside the scope of the tile matrix for "+tableName); } Map<String, Object> values = new HashMap<String, Object>(); values.put("zoom_level", zoom); values.put("tile_column", tileColumn); values.put("tile_row", tileRow); values.put("tile_data", tile); long recID = tilesTable.insert(this, values); if (recID>0) updateLastChange(tilesTable.getTableName(), tilesTable.getTableType()); return recID; } /** Check if a SRS with the supplied code is loaded in GpkgSpatialRefSys * * @param srsName The string value of the srs_name or srs_id * @return True if loaded, false if not or there was an error. */ public boolean isSRSLoaded(String srsName) { boolean loaded = false; int srsID = -2; try { srsID = Integer.parseInt( srsName ); } catch (NumberFormatException ignore) { // TODO Look-up the EPSG code number somehow? } String strWhere = srsID>-2 ? "srs_id="+srsID : "srs_name='"+srsName+"'"; try { ICursor cur = getSystemTable(GpkgSpatialRefSys.TABLE_NAME).query( this, new String[]{"srs_name"}, strWhere); loaded = cur.moveToNext(); cur.close(); } catch (Exception e) { e.printStackTrace(); } return loaded; } /** Create a {@linkplain FeaturesTable} from a {@link SimpleFeatureType} * * @param featureType The SimpleFeatureType to use. * @param tableExtents The extents for this table * @return The new FeaturesTable * @throws Exception If the supplied data is invalid or constraints are not * met (i.e No matching SRS definition in the gpkg_spatial_ref_sys table) */ public FeaturesTable createFeaturesTable(SimpleFeatureType featureType, BoundingBox tableExtents) throws Exception { FeaturesTable ft = new FeaturesTable( this, featureType.getTypeName()); ft.create( featureType, tableExtents ); return ft; } /** Add all {@link SimpleFeature}'s on the supplied collection into the GeoPackage as a batch. * If there are multiple feature types within the collection they are * automatically split to their corresponding tables. * The table name to insert into is taken from the local part of * the {@link FeatureType#getName()}.<p> * The relevant tables must already exist in the GeoPackage. 
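 * <p>A typical create-then-load sequence, sketched with hypothetical inputs
 * ({@code myFeatureType}, {@code initialExtents} and {@code myFeatures} are assumptions):
 * <pre>{@code
 * FeaturesTable table = gpkg.createFeaturesTable(myFeatureType, initialExtents);
 * int inserted = gpkg.insertFeatures(myFeatures); // Collection<SimpleFeature> keyed by type name
 * }</pre>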
* * @param features * @return The number of records inserted * @throws Exception */ public int insertFeatures(Collection<SimpleFeature> features) throws Exception { /* Features within the collection could be different types, so split * in to seperate lists for batch insertion */ Map<Name, List<SimpleFeature>> sfByType = new HashMap<Name, List<SimpleFeature>>(); for (SimpleFeature sf : features) { Name tName = sf.getType().getName(); List<SimpleFeature> thisType = sfByType.get(tName); if (thisType==null) { thisType = new ArrayList<SimpleFeature>(); sfByType.put(tName, thisType); } thisType.add(sf); } int numInserted = 0; FeaturesTable featTable = null; // For each set of feature's in our individual lists.. for (Map.Entry<Name, List<SimpleFeature>> e : sfByType.entrySet()) { featTable = (FeaturesTable)getUserTable( e.getKey().getLocalPart(), GpkgTable.TABLE_TYPE_FEATURES ); List<Map<String, Object>> insertVals = new ArrayList<Map<String, Object>>(); Collection<GpkgField> tabFields = featTable.getFields(); // Get and check dimensional output int mOpt = featTable.getGeometryInfo().getMOption(); int zOpt = featTable.getGeometryInfo().getZOption(); int dimension = 2; if ( mOpt==Z_M_VALUES_MANDATORY || zOpt==Z_M_VALUES_MANDATORY || mOpt==Z_M_VALUES_OPTIONAL || zOpt==Z_M_VALUES_OPTIONAL) { dimension = 3; } if (mOpt==Z_M_VALUES_MANDATORY && zOpt==Z_M_VALUES_MANDATORY) throw new IllegalArgumentException("4 dimensional output is not supported"); // Build values for each feature of this type.. for (SimpleFeature sf : e.getValue()) { insertVals.add( buildInsertValues(sf, tabFields, dimension) ); } // Do the update on the table numInserted += featTable.insert(this, insertVals); insertVals = null; } sfByType = null; if (numInserted>0) updateLastChange(featTable.getTableName(), featTable.getTableType()); return numInserted; } /** Insert a single {@link SimpleFeature} into the GeoPackage. * The table name to insert into is taken from the local part of * the {@link FeatureType#getName()}. * * @param feature The SimpleFeature to insert. * @return The RowID of the new record or -1 if not inserted * @throws Exception * @see {@link #insertFeatures(Collection)} for batch processing many features */ public long insertFeature(SimpleFeature feature) throws Exception { SimpleFeatureType type = feature.getType(); FeaturesTable featTable = (FeaturesTable)getUserTable( type.getName().getLocalPart(), GpkgTable.TABLE_TYPE_FEATURES ); // Get and check dimensional output int mOpt = featTable.getGeometryInfo().getMOption(); int zOpt = featTable.getGeometryInfo().getZOption(); int dimension = 2; if ( mOpt==Z_M_VALUES_MANDATORY || zOpt==Z_M_VALUES_MANDATORY || mOpt==Z_M_VALUES_OPTIONAL || zOpt==Z_M_VALUES_OPTIONAL) { dimension = 3; } if (mOpt==Z_M_VALUES_MANDATORY && zOpt==Z_M_VALUES_MANDATORY) throw new IllegalArgumentException("4 dimensional output is not supported"); Map<String, Object> values = buildInsertValues(feature, featTable.getFields(), dimension); long recID = featTable.insert(this, values); if (recID>0) updateLastChange(featTable.getTableName(), featTable.getTableType()); return recID; } /** Create a Map of field name to field value for inserting into a table. * * @param feature The {@link SimpleFeature} * @param tabFields The GeoPackage table fields to use for building the map. 
* @param geomDimension 2 or 3 for the Geomaetry ordinates/ * @return A Map * @throws IOException */ private Map<String, Object> buildInsertValues(SimpleFeature feature, Collection<GpkgField> tabFields, int geomDimension) throws IOException { // Construct values SimpleFeatureType type = feature.getType(); Map<String, Object> values = new HashMap<String, Object>(); Object value = null; FeatureField field = null; boolean passConstraint = true; // For each field defined in the table... for (GpkgField f : tabFields) { if (f.isPrimaryKey()) continue; // We can't update the PK! field = (FeatureField)f; // If defined as feature id, use getID, else find the attribute if ( field.isFeatureID() ) { value = feature.getID(); } else { int idx = type.indexOf( field.getFieldName() ); /* If the field is not available on the type, set to null to ensure * the value list matches the table definition */ if (idx==-1 || idx > type.getAttributeCount()) { value = null; } else { value = feature.getAttribute(idx); } } passConstraint = true; // Check constraint if not a blob if (field.getMimeType()==null && field.getConstraint()!=null) { passConstraint = field.getConstraint().isValueValid( value ); } if(passConstraint) { if (value instanceof Geometry) { values.put(field.getFieldName(), encodeGeometry( (Geometry)value, geomDimension ) ); } else { values.put(field.getFieldName(), value); } } else { if (MODE_STRICT) { throw new IllegalArgumentException("Field "+field.getFieldName()+" did not pass constraint check"); } log.log(Level.WARNING, "Field "+field.getFieldName()+" did not pass constraint check; Inserting Null"); values.put(field.getFieldName(), null); } } return values; } /** Set a limit on the number of vertices permissible on a single geometry * when trying to insert new features. If the limit is exceeded then the * geometries are simplified using the supplied tolerance. * Default is no limit (-1) * * @param limitTo Max vertices * @param tolerance The tolerance to apply during simplification. This value * should be appropriate to the geometry's SRS */ public void setSimplifyOnInsertion(int limitTo, double tolerance) { this.MAX_VERTEX_LIMIT = limitTo; simpleTolerance = tolerance; } private double simpleTolerance = 1; /** Encode a JTS {@link Geometry} to standard GeoPackage geometry blob * * @param geom The Geometry to encode * @param outputDimension How many dimensions to write (2 or 3). 
JTS does not support 4 * @return * @throws IOException */ private byte[] encodeGeometry(Geometry geom, int outputDimension) throws IOException { if (geom==null) throw new IOException("Null Geometry passed"); if (outputDimension < 2 || outputDimension > 3) throw new IllegalArgumentException("Output dimension must be 2 or 3"); // Stop outrageous geometries from being encoded and inserted if (MAX_VERTEX_LIMIT>0) { int b4 = geom.getNumPoints(); if (b4 > MAX_VERTEX_LIMIT) { geom = DouglasPeuckerSimplifier.simplify(geom, simpleTolerance); geom.geometryChanged(); int af = geom.getNumPoints(); log.log(Level.WARNING, "Geometry Simplified for insertion: "+b4+" to "+af+" points"); } } ByteArrayOutputStream output = new ByteArrayOutputStream(); // 'Magic' and Version output.write( "GP".getBytes() ); output.write( GPKG_GEOM_HEADER_VERSION ); // Header flags int endianOrder = ByteOrderValues.BIG_ENDIAN; byte flags = 0; if (GPKG_GEOMETRY_LITTLE_ENDIAN) { flags = (byte) (flags | (1 << 0)); endianOrder = ByteOrderValues.LITTLE_ENDIAN; } if (!geom.getEnvelopeInternal().isNull()) { /* JTS Envelope geoms are only ever XY, not XYZ or XYZM * therefore we only ever set the 2nd bit to 1 */ flags = (byte) (flags | (1 << 1)); } if ( geom.isEmpty() ) { // Set envelope bit to 0 flags = (byte) (flags | (1 << 0)); // Flag the geometry is empty flags = (byte) (flags | (1 << 4)); } if (GPKG_GEOMETRY_STANDARD==false) { // ExtendedGeoPackageBinary encoding flags = (byte) (flags | (1 << 5)); } // Bits 7 and 8 are currently reserved and un-used output.write(flags); // SRS byte[] buffer = new byte[4]; ByteOrderValues.putInt(geom.getSRID(), buffer, endianOrder); output.write(buffer); Envelope envelope = geom.getEnvelopeInternal(); /* Geom envelope - JTS only supports 2 dimensional envelopes. If Geom is * empty then we don't encode an envelope */ if (!envelope.isNull() && !geom.isEmpty()) { buffer = new byte[8]; // Min X ByteOrderValues.putDouble(envelope.getMinX(), buffer, endianOrder); output.write(buffer); // Max X ByteOrderValues.putDouble(envelope.getMaxX(), buffer, endianOrder); output.write(buffer); // Min Y ByteOrderValues.putDouble(envelope.getMinY(), buffer, endianOrder); output.write(buffer); // Max Y ByteOrderValues.putDouble(envelope.getMaxY(), buffer, endianOrder); output.write(buffer); } // Write the geometry output.write( new OGCWKBWriter( outputDimension ).write(geom) ); buffer = output.toByteArray(); output.close(); return buffer; } /** Update last_change field in GpkgContents for the given table name and type * to 'now'. * * @param tableName * @param tableType */ private void updateLastChange(String tableName, String tableType) { Map<String, Object> values = new HashMap<String, Object>(); values.put("last_change", DateUtil.serializeDateTime(System.currentTimeMillis(), true) ); String where = String.format("table_name='%s' and data_type='%s'", tableName, tableType); getSystemTable(GpkgContents.TABLE_NAME).update(this, values, where); } /** Insert an OWS Context document correctly in to the GeoPackage.<p> * This method only allows for one Context Document within the GeoPackage. * * @param contextDoc Properly formatted document as a String (JSON or XML) * @param mimeType The encoding of the Context document (JSON or XML) * @param overwrite If <code>True</code> any current record is overwritten. If <code>False</code> * and there is an existing record then nothing is done and the method will * return <code>False</code>. * @return True if inserted/ updated successfully. 
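 * <p>Sketch only; the document content is an assumption, but the mime type must be one of
 * text/xml, application/xml or application/json.
 * <pre>{@code
 * String owc = "{ \"type\": \"FeatureCollection\", \"properties\": { } }"; // OWS Context as JSON
 * boolean ok = gpkg.insertOWSContext(owc, "application/json", true);       // overwrite any existing doc
 * String[] stored = gpkg.getOWSContext(); // [0] = mime type, [1] = document
 * }</pre>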
*/ public boolean insertOWSContext(String contextDoc, String mimeType, boolean overwrite) { if (contextDoc==null || contextDoc.equals("") || mimeType==null) return false; if ( !mimeType.equalsIgnoreCase("text/xml") && !mimeType.equalsIgnoreCase("application/xml") && !mimeType.equalsIgnoreCase("application/json") ) { throw new IllegalArgumentException("Incorrect mimeType specified"); } // Do we have an existing record? GpkgTable md = getSystemTable(GpkgMetaData.TABLE_NAME); ICursor c = md.query(this, new String[]{"id"}, "md_standard_uri='http://www.opengis.net/owc/1.0'"); int cID = -1; if (c.moveToNext()) { cID = c.getInt(0); c.close(); } Map<String, Object> values = new HashMap<String, Object>(); values.put("md_scope", GpkgMetaData.SCOPE_UNDEFINED); values.put("md_standard_uri", "http://www.opengis.net/owc/1.0"); values.put("mime_type", mimeType); values.put("metadata", contextDoc); boolean updated = false; long mdRec = -1, mdrRec = -1; if (overwrite && cID > -1) { updated = md.update(this, values, "id="+cID) > 0; } else if (cID > -1) { // Don't overwrite, but has record so return false return false; } else if (cID==-1) { // No record, so insert mdRec = getSystemTable(GpkgMetaData.TABLE_NAME).insert(this, values); } // Didn't insert or update if (mdRec==-1 && updated==false) return false; values.clear(); if (updated) { // Update timestamp values.put("timestamp", DateUtil.serializeDateTime(System.currentTimeMillis(), true) ); mdrRec = getSystemTable(GpkgMetaDataReference.TABLE_NAME).update(this, values, "md_file_id="+cID); } else { values.put("reference_scope", GpkgMetaDataReference.SCOPE_GEOPACKAGE); values.put("md_file_id", mdRec); mdrRec = getSystemTable(GpkgMetaDataReference.TABLE_NAME).insert(this, values); } // Rollback GpkgMetaData if reference not inserted if (mdrRec < 1) { getSystemTable(GpkgMetaData.TABLE_NAME).delete(this, "id="+mdRec); } return mdrRec > -1; } /** Get the String representation of an OWS Context Document from the GeoPackage.<p> * Only the first Context Document within the GeoPackage is returned * ( as defined by md_standard_uri='http://www.opengis.net/owc/1.0' ) * * @return String[] The first entry is the Context mime-type, the second is the * String representation of the document. */ public String[] getOWSContext() { String[] ret = new String[2]; ICursor c = getSystemTable(GpkgMetaData.TABLE_NAME).query( this, new String[]{"mime_type", "metadata"}, "md_standard_uri='http://www.opengis.net/owc/1.0'"); if(c.moveToFirst()) { ret = new String[] {c.getString(0), c.getString(1)}; } c.close(); return ret; } /** Add a new constraint to the GeoPackage that can be referenced, using the same constraint_name, * from gpkg_data_columns.<p> * * The constraint must be created before a record that uses it is inserted into gpkg_data_columns, therefore * constraint names specified on {@link AttributeType}'s via the user-data must be added through this * method prior to passing the attribute definitions to * {@link #createFeatureTable(String, String, List, List, BoundingBox, String, boolean)} * with dataColumns set to True.<p> * * Any existing constraint(s) in the GeoPackage with the same name are updated (delete-insert).<p> * * @param tableName The name of the table to apply this constraint to. 
* @param columnNames An array of column names to apply this constrain to, WRT the tableName * @param dcConstraint {@link DataColumnConstraint} * */ public long addDataColumnConstraint(String tableName, String[] columnNames, DataColumnConstraint dcConstraint) { if (dcConstraint==null || dcConstraint.isValid()==false) return -1L; GpkgDataColumnConstraint dcc = new GpkgDataColumnConstraint(); DataColumnConstraint existingDCC = dcc.getConstraint(this, dcConstraint.constraintName); if (existingDCC!=null) { if (existingDCC.constraintType.equals(GpkgDataColumnConstraint.TYPE_ENUM)) { /* Do we want to delete everything and re-insert, or check and update? * Currently delete everything and re-insert */ dcc.delete(this, "constraint_name='"+dcConstraint.constraintName+"'"); } else { dcc.delete(this, "constraint_name='"+dcConstraint.constraintName+"'"); } } // Insert the constraint long newRec = dcc.insert(this, dcConstraint.toMap()); // Didn't insert/ update so don't update feature table if (newRec ==-1) return -1L; // Update GpkgDataColumns for the specified columns Map<String, Object> vals = null; GpkgTable sys = getSystemTable(GpkgDataColumns.TABLE_NAME); for (String col : columnNames) { vals = new HashMap<String, Object>(); vals.put("constraint_name", dcConstraint.constraintName); sys.update(this, vals, "table_name='"+tableName+"' AND column_name='"+col+"';"); } return newRec; } /** Check that the supplied geometry type name is valid for a GeoPackage * * @param geomDescriptor The GeometryDescriptor to check from. * @return True if its valid */ public boolean isGeomTypeValid(GeometryDescriptor geomDescriptor) { String geomType = geomDescriptor.getType().getName().getLocalPart().toLowerCase(); if (geomType.equals("geometry")) { return true; } else if (geomType.equals("point")) { return true; } else if (geomType.equals("linestring")) { return true; } else if (geomType.equals("polygon")) { return true; } else if (geomType.equals("multipoint")) { return true; } else if (geomType.equals("multilinestring")) { return true; } else if (geomType.equals("multipolygon")) { return true; } else if (geomType.equals("geomcollection")) { return true; } else { return false; } } /** Encode basic Java types to those permissible in a GeoPackage * * @param object The object value to decode * @return A String usable for a table definition data type. Defaults to TEXT for * any unknown class or Object */ public static String encodeType(Class<?> clazz) { String name = clazz.getSimpleName().toLowerCase(); if (name.equals("integer") || name.equals("int")) { return "INTEGER"; } else if (name.equals("string")) { return "TEXT"; } else if (name.equals("boolean") || name.equals("byte")) { return "BOOL"; } else if (name.equals("double") || name.equals("float")) { return "REAL"; } else if (name.equals("long")) { return "INTEGER"; } else if (name.equals("geometry") || name.equals("byte[]")) { return "BLOB"; } return "TEXT"; } /** Decode SQLite data types to Java classes * * @param sqlType * @return */ public Class<?> decodeType(String sqlType) { JavaType jType = sqlTypeMap.get(sqlType.toLowerCase()); if (jType==null || jType==JavaType.UNKNOWN) throw new IllegalArgumentException("Unknown SQL data type '"+sqlType+"'"); switch (jType) { case INTEGER: return Integer.class; case STRING: return String.class; case BOOLEAN: return Boolean.class; case FLOAT: return Float.class; case DOUBLE: return Double.class; case BYTE_ARR: return Byte[].class; } return String.class; } }
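/* Type-mapping sketch (illustrative only; values follow from the mappings defined above,
 * with gpkg being a connected GeoPackage instance):
 *   GeoPackage.encodeType(String.class)  -> "TEXT"
 *   GeoPackage.encodeType(Double.class)  -> "REAL"
 *   gpkg.decodeType("integer")           -> Integer.class
 */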
AugTech_GeoAPI_Impl/com/augtech/geoapi/geopackage/GeoPackage.java
/* * GeoPackage.java * * Copyright 2013, Augmented Technologies Ltd. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.augtech.geoapi.geopackage; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.FileNotFoundException; import java.io.IOException; import java.io.RandomAccessFile; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.logging.Level; import java.util.logging.Logger; import org.opengis.feature.simple.SimpleFeature; import org.opengis.feature.simple.SimpleFeatureType; import org.opengis.feature.type.AttributeType; import org.opengis.feature.type.FeatureType; import org.opengis.feature.type.GeometryDescriptor; import org.opengis.feature.type.Name; import org.opengis.geometry.BoundingBox; import org.opengis.referencing.crs.CoordinateReferenceSystem; import com.augtech.geoapi.feature.SimpleFeatureImpl; import com.augtech.geoapi.geometry.BoundingBoxImpl; import com.augtech.geoapi.geopackage.geometry.GeometryDecoder; import com.augtech.geoapi.geopackage.geometry.OGCWKBWriter; import com.augtech.geoapi.geopackage.geometry.StandardGeometryDecoder; import com.augtech.geoapi.geopackage.table.FeatureField; import com.augtech.geoapi.geopackage.table.FeaturesTable; import com.augtech.geoapi.geopackage.table.FeaturesTable.GeometryInfo; import com.augtech.geoapi.geopackage.table.GpkgContents; import com.augtech.geoapi.geopackage.table.GpkgDataColumnConstraint; import com.augtech.geoapi.geopackage.table.GpkgDataColumnConstraint.DataColumnConstraint; import com.augtech.geoapi.geopackage.table.GpkgDataColumns; import com.augtech.geoapi.geopackage.table.GpkgExtensions; import com.augtech.geoapi.geopackage.table.GpkgExtensions.Extension; import com.augtech.geoapi.geopackage.table.GpkgGeometryColumns; import com.augtech.geoapi.geopackage.table.GpkgMetaData; import com.augtech.geoapi.geopackage.table.GpkgMetaDataReference; import com.augtech.geoapi.geopackage.table.GpkgSpatialRefSys; import com.augtech.geoapi.geopackage.table.GpkgTileMatrix; import com.augtech.geoapi.geopackage.table.GpkgTileMatrixSet; import com.augtech.geoapi.geopackage.table.GpkgTriggers; import com.augtech.geoapi.geopackage.table.TilesTable; import com.augtech.geoapi.geopackage.table.TilesTable.TileMatrixInfo; import com.augtech.geoapi.geopackage.views.GpkgView; import com.augtech.geoapi.geopackage.views.STGeometryColumns; import com.augtech.geoapi.geopackage.views.STSpatialRefSys; import com.augtech.geoapi.geopackage.views.SpatialRefSys; import com.augtech.geoapi.referncing.CoordinateReferenceSystemImpl; import com.vividsolutions.jts.geom.Envelope; import com.vividsolutions.jts.geom.Geometry; import com.vividsolutions.jts.io.ByteOrderValues; import com.vividsolutions.jts.simplify.DouglasPeuckerSimplifier; public class GeoPackage { protected ISQLDatabase sqlDB = null; protected File dbFile = null; public enum JavaType { INTEGER, STRING, BOOLEAN, FLOAT, DOUBLE, BYTE_ARR, UNKNOWN } /** A map of possible SQL Field types 
to {@link JavaType} enum values. * Field type names are all lowercase */ public Map<String, JavaType> sqlTypeMap = new HashMap<String, JavaType>(); public Logger log = Logger.getAnonymousLogger(); private Map<String, GpkgTable> sysTables = new HashMap<String, GpkgTable>(); private Map<String, GpkgView> sysViews = new HashMap<String, GpkgView>(); private Map<String, GpkgTable> userTables = new HashMap<String, GpkgTable>(); /** The name to create (if required) and test for use as a FeatureID within the GeoPackage */ public static String FEATURE_ID_FIELD_NAME = "feature_id"; /** For each new FeaturesTable, create an R*Tree index if the SQLite library supports it? * Default is True. If the library does not support R*Tree ({@link ISQLDatabase#hasRTreeEnabled()} * then indexes cannot be created. */ public static final boolean CREATE_RTREE_FOR_FEATURES = true; /** The OGC GeoPackage specification these statements relate to */ public static final String SPEC_VERSION = "OGC 12-128r9 - 0.9.7 - v8"; /** The maximum currently supported GeoPacakge version */ public static final int MAX_GPKG_VERSION = 0; /** The Sqlite registered application_id for a GeoPackage */ public static final int GPKG_APPLICATION_ID = Integer.decode("0x47503130"); /** The maximum number of vertices permissible on a single Geometry. * -1 = no limit */ protected int MAX_VERTEX_LIMIT = -1; /** If True, reading of GeoPackage headers, pragmas and Geometry encodings will * be validated against the specification and exceptions thrown if not valid. * If False, checks will be performed, but exceptions won't be thrown unless * data cannot be understood. Typical examples are the application_id pragma and Geometry.*/ public static boolean MODE_STRICT = true; /** The Geometry version to write in to the Geometry columns. Default is 0 * for Version 1.0 */ public static int GPKG_GEOM_HEADER_VERSION = 0; /** If {@code True} insert StandardGeoPackageBinary geometries into the GeoPackage. * If {@code False} then the Geometry header is set to ExtendedGeoPackageBinary * (which this implementation does not yet implement - clause 3.1.2, Annex K of spec). * Default is {@code True} */ public static boolean GPKG_GEOMETRY_STANDARD = true; /** Encode new Geometry in Little Endian order? Default is {@code False} */ public static boolean GPKG_GEOMETRY_LITTLE_ENDIAN = false; public static final int Z_M_VALUES_PROHIBIT = 0; public static final int Z_M_VALUES_MANDATORY = 1; public static final int Z_M_VALUES_OPTIONAL = 2; /** An array of extensions applicable to this GeoPackage */ protected Extension[] gpkgExtensions = null; /** The maximum number of records to fetch in one go through the cursor. Default is 1000. * Increasing this number <i>may</i> result in slightly faster queries on large recordsets, * but <i>could</i> also result in memory exceptions or missing records (especially on mobile * devices with limited memory. (Tested on Android at 1000) */ public static int MAX_RECORDS_PER_CURSOR = 1000; /** Connect to, or create a new GeoPackage with the supplied name and version.<p> * If the supplied name already exists then the database is checked to see if it * is a valid GeoPackage. If the supplied file does not exist, a new empty GeoPackage * is created with the supplied name. * * @param fileName The name of the GeoPackage to create or connect to. The .gpkg extension is added * if not supplied. * @param overwrite Overwrite the existing GeoPackage? * @throws Exception If an existing GeoPackage fails the validity check. 
* @see #isGPKGValid() */ public GeoPackage(ISQLDatabase sqlDB, boolean overwrite) { if (!sqlDB.getDatabaseFile().toString().endsWith(".gpkg")) throw new IllegalArgumentException("Invalid file extension for database - Must be .gpkg"); this.sqlDB = sqlDB; this.dbFile = sqlDB.getDatabaseFile(); if (overwrite) { if (dbFile.exists() && !dbFile.delete()) throw new IllegalArgumentException("Unable to overwrite GeoPackage file"); } // Load table definitions sysTables.put(GpkgSpatialRefSys.TABLE_NAME, new GpkgSpatialRefSys()); sysTables.put(GpkgContents.TABLE_NAME, new GpkgContents() ); sysTables.put(GpkgDataColumnConstraint.TABLE_NAME, new GpkgDataColumnConstraint()); sysTables.put(GpkgDataColumns.TABLE_NAME, new GpkgDataColumns()); sysTables.put(GpkgExtensions.TABLE_NAME, new GpkgExtensions()); sysTables.put(GpkgGeometryColumns.TABLE_NAME, new GpkgGeometryColumns()); sysTables.put(GpkgMetaData.TABLE_NAME, new GpkgMetaData()); sysTables.put(GpkgMetaDataReference.TABLE_NAME, new GpkgMetaDataReference()); sysTables.put(GpkgTileMatrix.TABLE_NAME, new GpkgTileMatrix()); sysTables.put(GpkgTileMatrixSet.TABLE_NAME, new GpkgTileMatrixSet()); sysViews.put(SpatialRefSys.VIEW_NAME, new SpatialRefSys()); sysViews.put(STGeometryColumns.VIEW_NAME, new STGeometryColumns()); sysViews.put(STSpatialRefSys.VIEW_NAME, new STSpatialRefSys()); //sysViews.put(GeometryColumns.VIEW_NAME, new GeometryColumns()); // Requires function definition // Look-ups for sql to Java sqlTypeMap.put("int", JavaType.INTEGER); sqlTypeMap.put("integer", JavaType.INTEGER); sqlTypeMap.put("tinyint", JavaType.INTEGER); sqlTypeMap.put("text", JavaType.STRING); sqlTypeMap.put("date", JavaType.STRING); sqlTypeMap.put("datetime", JavaType.STRING); sqlTypeMap.put("string", JavaType.STRING); sqlTypeMap.put("boolean", JavaType.BOOLEAN); sqlTypeMap.put("float", JavaType.FLOAT); sqlTypeMap.put("double", JavaType.DOUBLE); sqlTypeMap.put("real", JavaType.DOUBLE); sqlTypeMap.put("long", JavaType.DOUBLE); sqlTypeMap.put("geometry", JavaType.BYTE_ARR); sqlTypeMap.put("blob", JavaType.BYTE_ARR); sqlTypeMap.put("none", JavaType.BYTE_ARR); /* If the file alread exists, check it is a valid geopackage */ if (dbFile.exists()) { if (!isGPKGValid(false)) throw new IllegalArgumentException("GeoPackage "+dbFile.getName()+" failed integrity checks - Check the source."); } else { log.log(Level.INFO, "Database file does not exist. Creating new GeoPackage "+dbFile.getName()); // Create the DB file this.sqlDB.createDatabase(); for (GpkgTable tab : sysTables.values()) tab.create(this); for (GpkgView view : sysViews.values()) view.create(this); // Our standard triggers for (String stmt : GpkgTriggers.ALL_STANDARD_TRIGGERS) sqlDB.execSQL( stmt ); for (String stmt : GpkgSpatialRefSys.INSERT_DEFAULT_SPATIAL_REF_SYS) sqlDB.execSQL( stmt ); // Try setting the application_id pragma through Sqlite implementation if ( !setGpkgAppPragma() ) setGpkgAppHeader(); if (!isGPKGValid(true)) throw new IllegalArgumentException("GeoPackage "+dbFile.getName()+" failed integrity checks - Check the source."); } log.log(Level.INFO, "Connected to GeoPackage "+dbFile.getName()); } /** Get the name of the database file associated with this GeoPackage * * @return */ public String getDatabaseFileName() { return this.dbFile.toString(); } /** Close the underlying SQLite DB instance associated with this GeoPackge * */ public void close() { this.sqlDB.close(); } /** Check for the {@link #GPKG_APPLICATION_ID} in the database Pragma application_id * field. 
* * @return True if its set */ private boolean isGpkgAppPragmaSet() { boolean isGPKG = false; ICursor c = sqlDB.doRawQuery("pragma application_id"); if (c.moveToFirst()) { isGPKG = c.getInt(0)==GPKG_APPLICATION_ID; } c.close(); return isGPKG; } /** Set the GeoPackage application ID pragma. * * @return True if set successfully. */ private boolean setGpkgAppPragma() { if (!sqlDB.isOpen()) sqlDB.getDatabase(true); sqlDB.doRawQuery("pragma application_id="+GPKG_APPLICATION_ID); return isGpkgAppPragmaSet(); } /** Manually test whether the SQLite header contains the {@link #GPKG_APPLICATION_ID} * This is used as no current version of Android supports a version of Sqlite that supports the * pragma 'application_id', therefore we write to the header manually. * * @return True if its set. */ private boolean isGpkgAppHeaderSet() { if (sqlDB.isOpen()) sqlDB.close(); boolean isSet = false; try { RandomAccessFile raf = new RandomAccessFile(dbFile, "r"); raf.seek(68); int n68 = raf.readInt(); isSet = n68==GPKG_APPLICATION_ID; raf.close(); } catch (FileNotFoundException e) { e.printStackTrace(); } catch (IOException e) { e.printStackTrace(); } return isSet; } /** Manually set the SQLite file header to include the {@link #GPKG_APPLICATION_ID}. * This is used as no current version of Android supports a version of Sqlite that supports the * pragma 'application_id', therefore we write to the header manually. * * @return True if set, false if there was an error. */ private boolean setGpkgAppHeader() { if (sqlDB.isOpen()) sqlDB.close(); /* */ try { RandomAccessFile raf = new RandomAccessFile(dbFile, "rw"); raf.seek(68); raf.writeInt( GPKG_APPLICATION_ID ); raf.close(); } catch (Exception e) { e.printStackTrace(); return false; } return true; } /** Check that the GeoPackage is valid according to tests outlined in the specification, * namely that the application_id is correct, a database integrity returns 'ok' and there * are no foreign key issues.<p> * This check is performed automatically when connecting to a GeoPackage, but should * be performed before passing a GeoPackage to another client application or service. * * @param doIntegrity True to perform a PRAGMA integrity_check. This can take a long * time on large files (>250mb), therefore it is only normally run * when a GeoPackage is created through this library. * * @return True if the checks pass. */ public boolean isGPKGValid(boolean doIntegrity) { boolean isGPKG = false; boolean integrity = false; boolean foreignKey = false; isGPKG = isGpkgAppPragmaSet(); if ( !isGPKG && MODE_STRICT ) isGPKG = isGpkgAppHeaderSet(); sqlDB.getDatabase(false); ICursor c = null; if (doIntegrity) { c = sqlDB.doRawQuery("PRAGMA integrity_check"); if (c.moveToFirst()) { integrity = c.getString(0).equals("ok"); } c.close(); } else { integrity = true; } c = sqlDB.doRawQuery("PRAGMA foreign_key_check"); foreignKey = c.moveToFirst(); c.close(); // Check all system tables are in the database boolean tabsExist = true; for (GpkgTable gt : sysTables.values()) { if (!gt.isTableInDB(this)) { tabsExist = false; continue; } } return (isGPKG || MODE_STRICT==false) && integrity && !foreignKey && tabsExist; } /** Get the database associated with this GeoPackage * * @return */ public ISQLDatabase getDatabase() { return this.sqlDB; } /** Get all tiles in the table, at the specified zoom, in order to cover the supplied * bounding box. * * @param tableName The table to query * @param bbox The extents of the area to cover. 
* @param zoomLevel What tile level, or zoom, should the query get * @return * @throws Exception */ public List<SimpleFeature> getTiles(String tableName, BoundingBox bbox, int zoomLevel) throws Exception { log.log(Level.INFO, "BBOX query for images in "+tableName); List<SimpleFeature> allFeats = new ArrayList<SimpleFeature>(); GpkgTable tilesTable = getUserTable( tableName, GpkgTable.TABLE_TYPE_TILES ); // IF strict, check for primary key, although not essential for this query if (MODE_STRICT) { if (tilesTable.getPrimaryKey(this).equals("unknown")) throw new Exception("Primary key not defined on table "+tableName ); } // Is BBOX valid against the table or tile_matrix_set? if ( !checkBBOXAgainstLast(tilesTable, bbox, false, false)) return allFeats; // Tile matrix data for this table GpkgRecords tmRecs = getSystemTable(GpkgTileMatrix.TABLE_NAME).query( this, "table_name='"+tableName+"' AND zoom_level="+zoomLevel); if (tmRecs.getFieldInt(0, "zoom_level")!=zoomLevel) throw new Exception("Zoom level "+zoomLevel+" is not defined for this tile pyramid"); int tmWidth = tmRecs.getFieldInt(0, "tile_width"); int tmHeight = tmRecs.getFieldInt(0, "tile_height"); double pixX = tmRecs.getFieldDouble(0, "pixel_x_size"); double pixY = tmRecs.getFieldDouble(0, "pixel_y_size"); // Construct a temporary matrix_set bbox (for convenience) GpkgRecords tms = getSystemTable(GpkgTileMatrixSet.TABLE_NAME).query(this, "table_name='"+tilesTable.tableName+"'"); BoundingBox tmsBox = new BoundingBoxImpl( tms.getFieldDouble(0, "min_x"), tms.getFieldDouble(0, "max_x"), tms.getFieldDouble(0, "min_y"), tms.getFieldDouble(0, "max_y")); /* TODO Get all tiles in the table at the specified zoom and check the bounds?, * or something else... */ /* Calculate the min and max rows and columns. * This mechanism works for 3857 (slippy tiles) but serious doubt it does for * anything else, therefore have to test with other projections and create a generic * mechanism for creating a where clause from a bounding box */ int minX = (int) Math.round( (bbox.getMinX() - tmsBox.getMinX() ) / (tmWidth * pixX) ); int maxX = (int) Math.round( (bbox.getMaxX() - tmsBox.getMinX() ) / (tmWidth * pixX) ); int minY = (int) Math.round( (tmsBox.getMaxY() - bbox.getMaxY() ) / (tmHeight * pixY) ); int maxY = (int) Math.round( (tmsBox.getMaxY() - bbox.getMinY() ) / (tmHeight * pixY) ); String strWhere = String.format( "zoom_level=%s AND tile_column >= %s AND tile_column <= %s AND tile_row >=%s AND tile_row <=%s", zoomLevel, minX, maxX, minY, maxY); return getTiles(tableName, strWhere); } /** Query the GeoPackage for one or more tiles based on a where clause. * The SimpleFeature's that are returned have a {@linkplain FeatureType} name * matching the tableName and a {@link GeometryDescriptor} mathing that defined * in gpkg_contents for the table.<p> * The feature id (accessible via {@link SimpleFeature#getID()}) is the of the form * <code>TableName-RecordID-zoom-row_ref-col_ref (or tableName-id-zoom-x-y) </code><p> * The image data is stored as a byte[] on an attribute named 'the_image' and the bounds * of the tile are stored as a {@link BoundingBox} on an attribute named 'the_geom'. 
* * @param tableName The {@linkplain TilesTable#getTableName()} to query * @param whereClause The SQL where clause, excluding the word 'where' * @return A List of {@linkplain SimpleFeature}'s * @throws Exception */ public List<SimpleFeature> getTiles(String tableName, String whereClause) throws Exception { log.log(Level.INFO, "WHERE query for images in "+tableName); List<SimpleFeature> allFeats = new ArrayList<SimpleFeature>(); TilesTable tilesTable = (TilesTable)getUserTable( tableName, GpkgTable.TABLE_TYPE_TILES ); // IF strict, check for primary key, although not essential for this query if (MODE_STRICT) { if (tilesTable.getPrimaryKey(this).equals("unknown")) throw new Exception("Primary key not defined on table "+tableName ); } // Get the records matching our query GpkgRecords featRecords = tilesTable.query(this, whereClause); if (featRecords.size()==0) return allFeats; // Construct the feature type SimpleFeatureType featureType = tilesTable.getSchema(); List<Object> attrValues = null; TileMatrixInfo tmi = ((TilesTable)tilesTable).getTileMatrixInfo(); // Now go through each record building the feature with it's attribute values for (int rIdx=0; rIdx < featRecords.size(); rIdx++) { // Create new list so previous values are not over-written attrValues = new ArrayList<Object>(); attrValues.add( featRecords.getFieldBlob(rIdx, "tile_data") ); // Construct bounding box for tile BoundingBox bbox = tmi.getTileBounds( featRecords.getFieldInt(rIdx, "tile_column"), featRecords.getFieldInt(rIdx, "tile_row"), featRecords.getFieldInt(rIdx, "zoom_level") ); attrValues.add( bbox ); // Tile details attrValues.add( featRecords.getFieldInt(rIdx, "tile_column") ); attrValues.add( featRecords.getFieldInt(rIdx, "tile_row") ); attrValues.add( featRecords.getFieldInt(rIdx, "zoom_level") ); // The ID for this tile String fid = String.format("%s-%s-%s-%s-%s", tableName, featRecords.getFieldInt(rIdx, "id"), featRecords.getFieldInt(rIdx, "tile_column"), featRecords.getFieldInt(rIdx, "tile_row"), featRecords.getFieldInt(rIdx, "zoom_level") ); // Create the feature and add to list of all features allFeats.add( new SimpleFeatureImpl(fid, attrValues, featureType ) ); } return allFeats; } /** Check if this feature is in the GeoPackage.<p> * The query is based on {@link SimpleFeatureType#getTypeName()} = tableName and * {@link SimpleFeature#getID()} = Table.featureFieldName * * @param simpleFeature The feature to test. * @return True if found */ public boolean isFeatureInGeoPackage(SimpleFeature simpleFeature) { String tableName = simpleFeature.getType().getTypeName(); FeaturesTable featTable = (FeaturesTable)getUserTable( tableName, GpkgTable.TABLE_TYPE_FEATURES ); return featTable.isFeatureInTable(simpleFeature); } /** Get a list of all SimpleFeature's within, or intersecting with, the supplied BoundingBox.<p> * This version always performs an intersection test and does not check the bbox is within or * intersecting with the table extents. A StandardGeometryDecoder is used for reading feature * data. * * @param tableName The <i>case sensitive</i> table name in this GeoPackage to query. * @param bbox The {@link BoundingBox} to find features in, or intersecting with. * @return A list of {@linkplain SimpleFeature}'s * @throws Exception If the SRS of the supplied {@link BoundingBox} does not match the SRS of * the table being queried. 
*/ public List<SimpleFeature> getFeatures(String tableName, BoundingBox bbox) throws Exception { return getFeatures(tableName, bbox, true, true, new StandardGeometryDecoder() ); } /** Get a list of {@link SimpleFeature} from the GeoPackage by specifying a where clause * (for example {@code featureId='pipe.1234'} or {@code id=1234} ) * * @param tableName The <i>case sensitive</i> table name that holds the feature (probably * the localName of {@link SimpleFeatureType#getName()} * @param whereClause The 'Where' clause, less the where. Passing Null will return * all records from the table, which is discouraged. * @param geomDecoder The type of {@linkplain GeometryDecoder} to use. * @return A list of SimpleFeature's or an empty list if none were found in the specified table * matching the the filter * * @throws Exception */ public List<SimpleFeature> getFeatures(String tableName, String whereClause, GeometryDecoder geomDecoder) throws Exception { FeaturesTable featTable = (FeaturesTable)getUserTable( tableName, GpkgTable.TABLE_TYPE_FEATURES ); String stmt = "SELECT * FROM ["+tableName+"]"; if (whereClause!=null && !whereClause.equals("")) stmt+=" WHERE "+whereClause; return getFeatures(stmt, featTable, geomDecoder); } /** Get a list of all SimpleFeature's within, or intersecting with, the supplied BoundingBox. * * @param tableName The <i>case sensitive</i> table name in this GeoPackage to query. * @param bbox The {@link BoundingBox} to find features in, or intersecting with. * @param includeIntersect Should feature's intersecting with the supplied box be returned? * @param testExtents Should the bbox be tested against the data extents in gpkg_contents before * issuing the query? If <code>False</code> a short test on the extents is performed. (In case table * extents are null) * @param geomDecoder The {@link GeometryDecoder} to use for reading feature geometries. * @return A list of {@linkplain SimpleFeature}'s * @throws Exception If the SRS of the supplied {@link BoundingBox} does not match the SRS of * the table being queried. */ public List<SimpleFeature> getFeatures(String tableName, BoundingBox bbox, boolean includeIntersect, boolean testExtents, GeometryDecoder geomDecoder) throws Exception { log.log(Level.INFO, "BBOX query for features in "+tableName); List<SimpleFeature> allFeats = new ArrayList<SimpleFeature>(); FeaturesTable featTable = (FeaturesTable)getUserTable( tableName, GpkgTable.TABLE_TYPE_FEATURES ); // Is BBOX valid against the table? 
if ( !checkBBOXAgainstLast(featTable, bbox, includeIntersect, testExtents)) return allFeats; GeometryInfo gi = featTable.getGeometryInfo(); StringBuffer sqlStmt = new StringBuffer(); String pk = featTable.getPrimaryKey(this); if (MODE_STRICT) { if (pk.equals("rowid")) throw new Exception("Primary key not defined on table "+featTable.getTableName() ); } // If this GeoPackage is RTREE enabled, use the spatial index if (sqlDB.hasRTreeEnabled() && gi.hasSpatialIndex()) { String idxTable = "[rtree_"+tableName+"_"+gi.getColumnName()+"]"; sqlStmt.append("SELECT [").append(tableName).append("].* FROM [").append(tableName).append("], "); sqlStmt.append(idxTable).append(" WHERE ["); sqlStmt.append(tableName).append("].").append(pk).append("="); sqlStmt.append(idxTable).append(".id"); sqlStmt.append(" AND MinX>=").append( bbox.getMinX() ); sqlStmt.append(" AND MaxX<=").append( bbox.getMaxX() ); sqlStmt.append(" AND MinY>=").append( bbox.getMinY() ); sqlStmt.append(" AND MaxY<=").append( bbox.getMaxY() ); return getFeatures(sqlStmt.toString(), featTable, geomDecoder); } /* Query all records in the feature table and check the header envelope * for matchin/ intersecting bounds. If the envelope is null, then the full * geometry is read and checked */ sqlStmt.append("SELECT * FROM [").append(tableName).append("] WHERE id IN("); // Query only for feature geometry and test that before getting all attributes long startTime = System.currentTimeMillis(); int totalRecs = featTable.getCount(this); int lastPK = 0, recCount = 0, hitCount = 0; boolean hit = false; Envelope headerEnv = null; Envelope query = new Envelope(bbox.getMinX(), bbox.getMaxX(), bbox.getMinY(), bbox.getMaxY()); /* Deprecated getCount() on Cursor to save the cursor iterating * whole ResultSet on underlying Cursor implementation */ // While we have less records than total for table.. while (recCount < totalRecs) { String sql = String.format("SELECT %s,%s FROM [%s] WHERE %s > %s ORDER BY %s LIMIT %s", pk, gi.getColumnName(), tableName, pk, lastPK, pk, MAX_RECORDS_PER_CURSOR); ICursor cPage = getDatabase().doRawQuery( sql ); // Go through these x number of records boolean hasRecords = false; while (cPage.moveToNext()) { hasRecords = true; // Decode the geometry and test headerEnv = geomDecoder.setGeometryData( cPage.getBlob(1) ).getEnvelope(); // No bbox from header, so decode the whole geometry (a lot slower) if (headerEnv.isNull() && !geomDecoder.isEmptyGeom()) { headerEnv = geomDecoder.getGeometry().getEnvelopeInternal(); } // Test bounds hit = (includeIntersect ? query.intersects( headerEnv ) : false) || query.contains( headerEnv ) || headerEnv.contains( query ); if (hit) { sqlStmt.append(cPage.getInt(0)).append(","); hitCount++; } // Store the last key we saw for the next page query lastPK = cPage.getInt(0); recCount++; } cPage.close(); if (hasRecords==false) break; } log.log(Level.INFO, recCount+" geometries checked in "+(System.currentTimeMillis()-startTime)/1000+" seconds"); // Didn't find anything if (hitCount==0) return allFeats; sqlStmt.setLength(sqlStmt.length()-1);// How many id's can the DB handle?? sqlStmt.append(");"); return getFeatures(sqlStmt.toString(), featTable, geomDecoder ); } /** Get a list of {@link SimpleFeature} from the GeoPackage by specifying a full SQL statement. * * @param sqlStatement * @param featTable * @param geomDecoder The type of {@linkplain GeometryDecoder} to use. 
* @return A list of SimpleFeature's or an empty list if none were found in the specified table * matching the the filter * @throws Exception */ protected List<SimpleFeature> getFeatures(String sqlStatement, FeaturesTable featTable, GeometryDecoder geomDecoder) throws Exception { List<SimpleFeature> allFeats = new ArrayList<SimpleFeature>(); int totalRecs = featTable.getCount(this); if (totalRecs==0) return allFeats; SimpleFeatureType featureType = featTable.getSchema(); List<AttributeType> attrTypes = featureType.getTypes(); GeometryInfo geomInfo = featTable.getGeometryInfo(); // Find the feature id field String featureFieldName = ""; for (GpkgField gf : featTable.getFields() ) { if ( ((FeatureField)gf).isFeatureID() ) { featureFieldName = gf.getFieldName(); break; } } /* Query the table in 'pages' of LIMIT number */ long startTime = System.currentTimeMillis(); String pk = featTable.getPrimaryKey(this); if (MODE_STRICT) { if (pk.equals("rowid")) throw new Exception("Primary key not defined on table "+featTable.getTableName() ); } int lastPK = 0, recCount = 0; sqlStatement = sqlStatement.endsWith(";") ? sqlStatement.substring(0, sqlStatement.length()-1) : sqlStatement; int whereIdx = sqlStatement.toLowerCase().indexOf("where"); sqlStatement = whereIdx>0 ? sqlStatement+" AND " : sqlStatement+" WHERE "; ArrayList<Object> attrValues = new ArrayList<Object>(); Object value = null; String fid; GpkgRecords featRecords = null; String sql = ""; String fieldName = null; // While we have less records than total for table.. while (recCount < totalRecs) { sql = String.format(sqlStatement+"%s > %s ORDER BY %s LIMIT %s", pk, lastPK, pk, MAX_RECORDS_PER_CURSOR); featRecords = featTable.rawQuery(this, sql ); if (featRecords.size()==0) break; // Now go through each record building the feature with it's attribute values for (int rIdx=0; rIdx < featRecords.size(); rIdx++) { // Create new list so previous values are not overridden attrValues = new ArrayList<Object>(); fid = null; /* For each type definition, get the value, ensuring the * correct order is maintained on the value list*/ for (int typeIdx=0; typeIdx < attrTypes.size(); typeIdx++) { fieldName = attrTypes.get( typeIdx ).getName().getLocalPart(); value = featRecords.get(rIdx).get( featRecords.getFieldIdx(fieldName) ); // If defined as the feature's ID, store for feature creation if ( fieldName.equals(featureFieldName) ) { fid = String.valueOf( value ); continue; // Add as ID, not an attribute } else if (fieldName.equals(geomInfo.getColumnName())) { // If geometry column, decode to actual Geometry value = geomDecoder.setGeometryData( (byte[])value ).getGeometry(); } attrValues.add(value); } attrValues.trimToSize(); // Get or create a feature id? 
if (fid==null || fid.equals("null")) fid = featTable.getTableName()+"."+recCount; // Create the feature and add to list of all features allFeats.add( new SimpleFeatureImpl(fid, attrValues, featureType ) ); // Store the last key we saw for the next page query lastPK = featRecords.getFieldInt(rIdx, pk ); recCount++; } } featRecords = null; geomDecoder.clear(); log.log(Level.INFO, recCount+" features built in "+(System.currentTimeMillis()-startTime)/1000+" secs"); return allFeats; } /** Convenience method to check the passed bounding box (for a query) CRS matches * that on the {@link #lastFeatTable} and the bbox is within/ intersects with the * table boundingbox * * @param checkTable The table to check the query box against * @param queryBBox The query Bounding box * @param includeIntersect If vector/ feature data, should we test for intersection as * well as contains? * @param shortTest If True only the CRS's are tested to make sure they match. If False, the * table and/ or tile matrix set extents are tested as well. * @return True if checks pass */ private boolean checkBBOXAgainstLast(GpkgTable checkTable, BoundingBox queryBBox, boolean includeIntersect, boolean shortTest) { // Check the SRS's are the same (Projection beyond scope of implementation) BoundingBox tableBbox = checkTable.getBounds(); String qCode = queryBBox.getCoordinateReferenceSystem().getName().getCode(); String qCodeS = queryBBox.getCoordinateReferenceSystem().getName().getCodeSpace(); String tCode = tableBbox.getCoordinateReferenceSystem().getName().getCode(); String tCodeS = tableBbox.getCoordinateReferenceSystem().getName().getCodeSpace(); if (!qCode.equalsIgnoreCase(tCode) || !qCodeS.equalsIgnoreCase(tCodeS)) { log.log(Level.WARNING, "Passed bounding box SRS does not match table SRS"); return false; } if (shortTest) return true; /* If GpkgContents has null bounds for this table do full query, * otherwise test the table bounds */ boolean queryTable = false; if (!tableBbox.isEmpty()) { if (checkTable instanceof TilesTable) { // If tiles, bbox must be inside table extents queryTable = queryBBox.intersects( tableBbox ) || tableBbox.contains( queryBBox ); } else { // If features, inside or intersects queryTable = (includeIntersect ? queryBBox.intersects( tableBbox ) : false) || queryBBox.contains( tableBbox ) || tableBbox.contains(queryBBox); } } else { if (checkTable instanceof TilesTable) { // If a tiles table and no bounds in contents, check the tile_matrix_set definitions GpkgRecords tms = null; try { tms = getSystemTable(GpkgTileMatrixSet.TABLE_NAME).query(this, "table_name='"+checkTable.tableName+"'"); } catch (Exception e) { e.printStackTrace(); return false; } // Construct a bbox to test against CoordinateReferenceSystem crs = new CoordinateReferenceSystemImpl(""+tms.getFieldInt(0, "srs_id")); BoundingBox tmsBox = new BoundingBoxImpl( tms.getFieldDouble(0, "min_x"), tms.getFieldDouble(0, "max_x"), tms.getFieldDouble(0, "min_y"), tms.getFieldDouble(0, "max_y"), crs); queryTable = queryBBox.intersects( tmsBox ) || tmsBox.contains( queryBBox ); } } return queryTable; } /** Get a specific GeoPackage system table * * @param tableName * @return */ public GpkgTable getSystemTable(String tableName) { return sysTables.get(tableName); } /** Get one of the user defined tables by name. If the table has not * been loaded then it is created and cached. * * @param tableName The name of the table. 
* @param tableType Either {@link GpkgTable#TABLE_TYPE_FEATURES} || {@link GpkgTable#TABLE_TYPE_TILES} * @return An instance of the table. * @throws IllegalArgumentException if the table type is not one of the above, or the * table does not exist in the GeoPackage. */ public GpkgTable getUserTable(String tableName, String tableType) { GpkgTable gpkgTable = userTables.get(tableName); if (gpkgTable==null) { if (tableType.equals(GpkgTable.TABLE_TYPE_FEATURES) ) { gpkgTable = new FeaturesTable(this, tableName); } else if (tableType.equals(GpkgTable.TABLE_TYPE_TILES) ) { gpkgTable = new TilesTable(this, tableName); } else { throw new IllegalArgumentException("Incompatible user table type: "+tableType); } if (!gpkgTable.isTableInGpkg(this)) throw new IllegalArgumentException("Table "+tableName+" does not exist in the GeoPackage"); userTables.put(tableName, gpkgTable); } return gpkgTable; } /** Get a list of all user tables within the current GeoPackage.<p> * Note that the results of this query are not cached in the same way that system tables are and * the table data is not populated until a relevant method/ query (on the table) is * called. This allows for quicker/ lower cost checks on the number and/ or names of tables in * the GeoPackage. * * @param tableType Either {@link GpkgTable#TABLE_TYPE_FEATURES} or {@link GpkgTable#TABLE_TYPE_TILES} * @return A new list of tables or an empty list if none were found or the wrong tableType was specified. */ public List<GpkgTable> getUserTables(String tableType) { ArrayList<GpkgTable> ret = new ArrayList<GpkgTable>(); if (!tableType.equals(GpkgTable.TABLE_TYPE_FEATURES) && !tableType.equals(GpkgTable.TABLE_TYPE_TILES)) return ret; ICursor tables = null; try { tables = sysTables.get(GpkgContents.TABLE_NAME).query(this, new String[]{"table_name"}, "data_type='"+tableType+"'"); } catch (Exception e) { e.printStackTrace(); return ret; } GpkgTable tab = null; while(tables.moveToNext()) { if (tableType.equals(GpkgTable.TABLE_TYPE_FEATURES)) { tab = new FeaturesTable(this, tables.getString(0)); } else { tab = new TilesTable(this, tables.getString(0)); } ret.add(tab); } tables.close(); ret.trimToSize(); return ret; } /** Insert a collection of tiles in to the GeoPackage * * @param features * @return The number of tiles inserted * @throws Exception */ public int insertTiles(Collection<SimpleFeature> features) throws Exception { int numInserted = 0; long rec = -1; for (SimpleFeature sf : features) { rec = insertTile(sf); if( rec>-1 ) numInserted++; } return numInserted; } /** Insert a tile into the GeoPackage from a SimpleFeature.<p> * The tile reference is taken from the feature ID in the form of zoom/xRef/yRef * with or without leading information. The zoom/x/y should be the last three parts * of the ID, which can include a file extension.<p> * The first instance of a byte[] on the feature's attribute will be taken as the image * data. * * @param feature The {@link SimpleFeature} with details as above * @return The row id of the newly inserted record if successful * * @throws Exception If the table does not exist in the GeoPackage or the supplied * tile reference is not valid for the table or the attributes and/ or reference cannot * be decoded. 
*/ public long insertTile(SimpleFeature feature) throws Exception { byte[] tileData = null; // Cycle feature attrs to get the image data (assumes first byte[] is image) for (int i=0; i<feature.getAttributeCount(); i++) { if (feature.getAttribute(i) instanceof byte[]) { tileData = (byte[]) feature.getAttribute(i); break; } } if (tileData==null) { throw new Exception("Could not find image data"); } //id=49/1/12/2023/1347.PNG.tile String[] idParts = feature.getID().split("/"); if (idParts.length<3) { throw new Exception("Could not decode tile reference from ID"); } int x=0, y=0, z=0; try { z = Integer.valueOf(idParts[idParts.length-3]); x = Integer.valueOf(idParts[idParts.length-2]); String sY = idParts[idParts.length-1]; y = Integer.valueOf(sY.substring(0, sY.indexOf("."))); } catch (Exception e) { throw new Exception("Could not decode tile reference from ID"); } return insertTile(feature.getType().getName().getLocalPart(), tileData, x, y, z); } /** Get a single tile by its zoom level column and row from this GeoPackage * * @param tableName The name of the table to query * @param x_col X reference (the column) * @param y_row Y reference (the row) * @param zoom The zoom level from the tile_matrix (generally between 0-18) * @return A byte[] or Null if no matching record is found * * @throws Exception */ public byte[] getTile(String tableName, int x_col, int y_row, int zoom) throws Exception { GpkgRecords recs = new TilesTable(this, tableName).query(this, String.format("zoom_level=%s AND tile_column=%s AND tile_row=%s", zoom, x_col, y_row) ); return recs.getFieldBlob(0, "tile_data"); } /** Insert a single raster tile into the GeoPackage * * @param tableName The tile table name * @param tile The tile image data * @param tileColumn The column ID (x) * @param tileRow The row ID (y) * @param zoom The zoom level for the tile * @return The row id of the newly inserted record if successful * * @throws Exception If the table does not exist in the GeoPackage or the supplied * tile reference is not valid for the table. 
*/ public long insertTile(String tableName, byte[] tile, int tileColumn, int tileRow, int zoom) throws Exception { TilesTable tilesTable = (TilesTable)getUserTable( tableName, GpkgTable.TABLE_TYPE_TILES ); // Is this data jpeg or png (only permissible types) String pngHdr = new String( new byte[]{tile[0], tile[1], tile[2], tile[3]} ); String jpgHdr = Integer.toHexString(tile[0] & 0xFF)+Integer.toHexString(tile[1] & 0xFF); if (!pngHdr.toLowerCase().contains("png") && !jpgHdr.equalsIgnoreCase("ffd8")) { throw new Exception("Tile image is neither PNG or JPG"); } // Check the tile reference is valid for the tile-matrix GpkgRecords matrix = getSystemTable(GpkgTileMatrix.TABLE_NAME).query(this, "table_name='"+tableName+"' AND zoom_level="+zoom); int w = matrix.getFieldInt(0, "matrix_width"); int h = matrix.getFieldInt(0, "matrix_height"); if (tileColumn > w || tileColumn < 1 || tileRow > h || tileRow < 1 || w==-1 || h==-1) { throw new Exception("Supplied tile reference is outside the scope of the tile matrix for "+tableName); } Map<String, Object> values = new HashMap<String, Object>(); values.put("zoom_level", zoom); values.put("tile_column", tileColumn); values.put("tile_row", tileRow); values.put("tile_data", tile); long recID = tilesTable.insert(this, values); if (recID>0) updateLastChange(tilesTable.getTableName(), tilesTable.getTableType()); return recID; } /** Check if a SRS with the supplied code is loaded in GpkgSpatialRefSys * * @param srsName The string value of the srs_name or srs_id * @return True if loaded, false if not or there was an error. */ public boolean isSRSLoaded(String srsName) { boolean loaded = false; int srsID = -2; try { srsID = Integer.parseInt( srsName ); } catch (NumberFormatException ignore) { // TODO Look-up the EPSG code number somehow? } String strWhere = srsID>-2 ? "srs_id="+srsID : "srs_name='"+srsName+"'"; try { ICursor cur = getSystemTable(GpkgSpatialRefSys.TABLE_NAME).query( this, new String[]{"srs_name"}, strWhere); loaded = cur.moveToNext(); cur.close(); } catch (Exception e) { e.printStackTrace(); } return loaded; } /** Create a {@linkplain FeaturesTable} from a {@link SimpleFeatureType} * * @param featureType The SimpleFeatureType to use. * @param tableExtents The extents for this table * @return The new FeaturesTable * @throws Exception If the supplied data is invalid or constraints are not * met (i.e No matching SRS definition in the gpkg_spatial_ref_sys table) */ public FeaturesTable createFeaturesTable(SimpleFeatureType featureType, BoundingBox tableExtents) throws Exception { FeaturesTable ft = new FeaturesTable( this, featureType.getTypeName()); ft.create( featureType, tableExtents ); return ft; } /** Add all {@link SimpleFeature}'s on the supplied collection into the GeoPackage as a batch. * If there are multiple feature types within the collection they are * automatically split to their corresponding tables. * The table name to insert into is taken from the local part of * the {@link FeatureType#getName()}. 
* * @param features * @return The number of records inserted * @throws Exception */ public int insertFeatures(Collection<SimpleFeature> features) throws Exception { /* Features within the collection could be different types, so split * in to seperate lists for batch insertion */ Map<Name, List<SimpleFeature>> typeList = new HashMap<Name, List<SimpleFeature>>(); for (SimpleFeature sf : features) { Name tName = sf.getType().getName(); List<SimpleFeature> thisType = typeList.get(tName); if (thisType==null) { thisType = new ArrayList<SimpleFeature>(); typeList.put(tName, thisType); } thisType.add(sf); } int numInserted = 0; FeaturesTable featTable = null; // For each set of feature's in our individual lists.. for (Map.Entry<Name, List<SimpleFeature>> e : typeList.entrySet()) { featTable = (FeaturesTable)getUserTable( e.getKey().getLocalPart(), GpkgTable.TABLE_TYPE_FEATURES ); List<Map<String, Object>> insertVals = new ArrayList<Map<String, Object>>(); Collection<GpkgField> tabFields = featTable.getFields(); // Get and check dimensional output int mOpt = featTable.getGeometryInfo().getMOption(); int zOpt = featTable.getGeometryInfo().getZOption(); int dimension = 2; if ( mOpt==Z_M_VALUES_MANDATORY || zOpt==Z_M_VALUES_MANDATORY || mOpt==Z_M_VALUES_OPTIONAL || zOpt==Z_M_VALUES_OPTIONAL) { dimension = 3; } if (mOpt==Z_M_VALUES_MANDATORY && zOpt==Z_M_VALUES_MANDATORY) throw new IllegalArgumentException("4 dimensional output is not supported"); // Build values for each feature of this type.. for (SimpleFeature sf : e.getValue()) { insertVals.add( buildInsertValues(sf, tabFields, dimension) ); } // Do the update on the table numInserted += featTable.insert(this, insertVals); insertVals = null; } typeList = null; if (numInserted>0) updateLastChange(featTable.getTableName(), featTable.getTableType()); return numInserted; } /** Insert a single {@link SimpleFeature} into the GeoPackage. * The table name to insert into is taken from the local part of * the {@link FeatureType#getName()}. * * @param feature The SimpleFeature to insert. * @return The RowID of the new record or -1 if not inserted * @throws Exception * @see {@link #insertFeatures(Collection)} for batch processing many features */ public long insertFeature(SimpleFeature feature) throws Exception { SimpleFeatureType type = feature.getType(); FeaturesTable featTable = (FeaturesTable)getUserTable( type.getName().getLocalPart(), GpkgTable.TABLE_TYPE_FEATURES ); // Get and check dimensional output int mOpt = featTable.getGeometryInfo().getMOption(); int zOpt = featTable.getGeometryInfo().getZOption(); int dimension = 2; if ( mOpt==Z_M_VALUES_MANDATORY || zOpt==Z_M_VALUES_MANDATORY || mOpt==Z_M_VALUES_OPTIONAL || zOpt==Z_M_VALUES_OPTIONAL) { dimension = 3; } if (mOpt==Z_M_VALUES_MANDATORY && zOpt==Z_M_VALUES_MANDATORY) throw new IllegalArgumentException("4 dimensional output is not supported"); Map<String, Object> values = buildInsertValues(feature, featTable.getFields(), dimension); long recID = featTable.insert(this, values); if (recID>0) updateLastChange(featTable.getTableName(), featTable.getTableType()); return recID; } /** Create a Map of field name to field value for inserting into a table. * * @param feature The {@link SimpleFeature} * @param tabFields The GeoPackage table fields to use for building the map. 
* @param geomDimension 2 or 3 for the Geomaetry ordinates/ * @return A Map * @throws IOException */ private Map<String, Object> buildInsertValues(SimpleFeature feature, Collection<GpkgField> tabFields, int geomDimension) throws IOException { // Construct values SimpleFeatureType type = feature.getType(); Map<String, Object> values = new HashMap<String, Object>(); Object value = null; FeatureField field = null; boolean passConstraint = true; // For each field defined in the table... for (GpkgField f : tabFields) { if (f.isPrimaryKey()) continue; // We can't update the PK! field = (FeatureField)f; // If defined as feature id, use getID, else find the attribute if ( field.isFeatureID() ) { value = feature.getID(); } else { int idx = type.indexOf( field.getFieldName() ); //This field isn't defined on the feature type, so can't insert value if (idx==-1 || idx > type.getAttributeCount()) continue; value = feature.getAttribute(idx); } passConstraint = true; // Check constraint if not a blob if (field.getMimeType()==null && field.getConstraint()!=null) { passConstraint = field.getConstraint().isValueValid( value ); } if(passConstraint) { if (value instanceof Geometry) { values.put(field.getFieldName(), encodeGeometry( (Geometry)value, geomDimension ) ); } else { values.put(field.getFieldName(), value); } } else { if (MODE_STRICT) { throw new IllegalArgumentException("Field "+field.getFieldName()+" did not pass constraint check"); } log.log(Level.WARNING, "Field "+field.getFieldName()+" did not pass constraint check; Inserting Null"); values.put(field.getFieldName(), null); } } return values; } /** Set a limit on the number of vertices permissible on a single geometry * when trying to insert new features. If the limit is exceeded then the * geometries are simplified using the supplied tolerance. * Default is no limit (-1) * * @param limitTo Max vertices * @param tolerance The tolerance to apply during simplification. This value * should be appropriate to the geometry's SRS */ public void setSimplifyOnInsertion(int limitTo, double tolerance) { this.MAX_VERTEX_LIMIT = limitTo; simpleTolerance = tolerance; } private double simpleTolerance = 1; /** Encode a JTS {@link Geometry} to standard GeoPackage geometry blob * * @param geom The Geometry to encode * @param outputDimension How many dimensions to write (2 or 3). 
JTS does not support 4 * @return * @throws IOException */ private byte[] encodeGeometry(Geometry geom, int outputDimension) throws IOException { if (geom==null) throw new IOException("Null Geometry passed"); if (outputDimension < 2 || outputDimension > 3) throw new IllegalArgumentException("Output dimension must be 2 or 3"); // Stop outrageous geometries from being encoded and inserted if (MAX_VERTEX_LIMIT>0) { int b4 = geom.getNumPoints(); if (b4 > MAX_VERTEX_LIMIT) { geom = DouglasPeuckerSimplifier.simplify(geom, simpleTolerance); geom.geometryChanged(); int af = geom.getNumPoints(); log.log(Level.WARNING, "Geometry Simplified for insertion: "+b4+" to "+af+" points"); } } ByteArrayOutputStream output = new ByteArrayOutputStream(); // 'Magic' and Version output.write( "GP".getBytes() ); output.write( GPKG_GEOM_HEADER_VERSION ); // Header flags int endianOrder = ByteOrderValues.BIG_ENDIAN; byte flags = 0; if (GPKG_GEOMETRY_LITTLE_ENDIAN) { flags = (byte) (flags | (1 << 0)); endianOrder = ByteOrderValues.LITTLE_ENDIAN; } if (!geom.getEnvelopeInternal().isNull()) { /* JTS Envelope geoms are only ever XY, not XYZ or XYZM * therefore we only ever set the 2nd bit to 1 */ flags = (byte) (flags | (1 << 1)); } if ( geom.isEmpty() ) { // Set envelope bit to 0 flags = (byte) (flags | (1 << 0)); // Flag the geometry is empty flags = (byte) (flags | (1 << 4)); } if (GPKG_GEOMETRY_STANDARD==false) { // ExtendedGeoPackageBinary encoding flags = (byte) (flags | (1 << 5)); } // Bits 7 and 8 are currently reserved and un-used output.write(flags); // SRS byte[] buffer = new byte[4]; ByteOrderValues.putInt(geom.getSRID(), buffer, endianOrder); output.write(buffer); Envelope envelope = geom.getEnvelopeInternal(); /* Geom envelope - JTS only supports 2 dimensional envelopes. If Geom is * empty then we don't encode an envelope */ if (!envelope.isNull() && !geom.isEmpty()) { buffer = new byte[8]; // Min X ByteOrderValues.putDouble(envelope.getMinX(), buffer, endianOrder); output.write(buffer); // Max X ByteOrderValues.putDouble(envelope.getMaxX(), buffer, endianOrder); output.write(buffer); // Min Y ByteOrderValues.putDouble(envelope.getMinY(), buffer, endianOrder); output.write(buffer); // Max Y ByteOrderValues.putDouble(envelope.getMaxY(), buffer, endianOrder); output.write(buffer); } // Write the geometry output.write( new OGCWKBWriter( outputDimension ).write(geom) ); buffer = output.toByteArray(); output.close(); return buffer; } /** Update last_change field in GpkgContents for the given table name and type * to 'now'. * * @param tableName * @param tableType */ private void updateLastChange(String tableName, String tableType) { Map<String, Object> values = new HashMap<String, Object>(); values.put("last_change", DateUtil.serializeDateTime(System.currentTimeMillis(), true) ); String where = String.format("table_name='%s' and data_type='%s'", tableName, tableType); getSystemTable(GpkgContents.TABLE_NAME).update(this, values, where); } /** Insert an OWS Context document correctly in to the GeoPackage.<p> * This method only allows for one Context Document within the GeoPackage. * * @param contextDoc Properly formatted document as a String (JSON or XML) * @param mimeType The encoding of the Context document (JSON or XML) * @param overwrite If <code>True</code> any current record is overwritten. If <code>False</code> * and there is an existing record then nothing is done and the method will * return <code>False</code>. * @return True if inserted/ updated successfully. 
*/ public boolean insertOWSContext(String contextDoc, String mimeType, boolean overwrite) { if (contextDoc==null || contextDoc.equals("") || mimeType==null) return false; if ( !mimeType.equalsIgnoreCase("text/xml") && !mimeType.equalsIgnoreCase("application/xml") && !mimeType.equalsIgnoreCase("application/json") ) { throw new IllegalArgumentException("Incorrect mimeType specified"); } // Do we have an existing record? GpkgTable md = getSystemTable(GpkgMetaData.TABLE_NAME); ICursor c = md.query(this, new String[]{"id"}, "md_standard_uri='http://www.opengis.net/owc/1.0'"); int cID = -1; if (c.moveToNext()) { cID = c.getInt(0); c.close(); } Map<String, Object> values = new HashMap<String, Object>(); values.put("md_scope", GpkgMetaData.SCOPE_UNDEFINED); values.put("md_standard_uri", "http://www.opengis.net/owc/1.0"); values.put("mime_type", mimeType); values.put("metadata", contextDoc); boolean updated = false; long mdRec = -1, mdrRec = -1; if (overwrite && cID > -1) { updated = md.update(this, values, "id="+cID) > 0; } else if (cID > -1) { // Don't overwrite, but has record so return false return false; } else if (cID==-1) { // No record, so insert mdRec = getSystemTable(GpkgMetaData.TABLE_NAME).insert(this, values); } // Didn't insert or update if (mdRec==-1 && updated==false) return false; values.clear(); if (updated) { // Update timestamp values.put("timestamp", DateUtil.serializeDateTime(System.currentTimeMillis(), true) ); mdrRec = getSystemTable(GpkgMetaDataReference.TABLE_NAME).update(this, values, "md_file_id="+cID); } else { values.put("reference_scope", GpkgMetaDataReference.SCOPE_GEOPACKAGE); values.put("md_file_id", mdRec); mdrRec = getSystemTable(GpkgMetaDataReference.TABLE_NAME).insert(this, values); } // Rollback GpkgMetaData if reference not inserted if (mdrRec < 1) { getSystemTable(GpkgMetaData.TABLE_NAME).delete(this, "id="+mdRec); } return mdrRec > -1; } /** Get the String representation of an OWS Context Document from the GeoPackage.<p> * Only the first Context Document within the GeoPackage is returned * ( as defined by md_standard_uri='http://www.opengis.net/owc/1.0' ) * * @return String[] The first entry is the Context mime-type, the second is the * String representation of the document. */ public String[] getOWSContext() { String[] ret = new String[2]; ICursor c = getSystemTable(GpkgMetaData.TABLE_NAME).query( this, new String[]{"mime_type", "metadata"}, "md_standard_uri='http://www.opengis.net/owc/1.0'"); if(c.moveToFirst()) { ret = new String[] {c.getString(0), c.getString(1)}; } c.close(); return ret; } /** Add a new constraint to the GeoPackage that can be referenced, using the same constraint_name, * from gpkg_data_columns.<p> * * The constraint must be created before a record that uses it is inserted into gpkg_data_columns, therefore * constraint names specified on {@link AttributeType}'s via the user-data must be added through this * method prior to passing the attribute definitions to * {@link #createFeatureTable(String, String, List, List, BoundingBox, String, boolean)} * with dataColumns set to True.<p> * * Any existing constraint(s) in the GeoPackage with the same name are updated (delete-insert).<p> * * @param tableName The name of the table to apply this constraint to. 
* @param columnNames An array of column names to apply this constrain to, WRT the tableName * @param dcConstraint {@link DataColumnConstraint} * */ public long addDataColumnConstraint(String tableName, String[] columnNames, DataColumnConstraint dcConstraint) { if (dcConstraint==null || dcConstraint.isValid()==false) return -1L; GpkgDataColumnConstraint dcc = new GpkgDataColumnConstraint(); DataColumnConstraint existingDCC = dcc.getConstraint(this, dcConstraint.constraintName); if (existingDCC!=null) { if (existingDCC.constraintType.equals(GpkgDataColumnConstraint.TYPE_ENUM)) { /* Do we want to delete everything and re-insert, or check and update? * Currently delete everything and re-insert */ dcc.delete(this, "constraint_name='"+dcConstraint.constraintName+"'"); } else { dcc.delete(this, "constraint_name='"+dcConstraint.constraintName+"'"); } } // Insert the constraint long newRec = dcc.insert(this, dcConstraint.toMap()); // Didn't insert/ update so don't update feature table if (newRec ==-1) return -1L; // Update GpkgDataColumns for the specified columns Map<String, Object> vals = null; GpkgTable sys = getSystemTable(GpkgDataColumns.TABLE_NAME); for (String col : columnNames) { vals = new HashMap<String, Object>(); vals.put("constraint_name", dcConstraint.constraintName); sys.update(this, vals, "table_name='"+tableName+"' AND column_name='"+col+"';"); } return newRec; } /** Check that the supplied geometry type name is valid for a GeoPackage * * @param geomDescriptor The GeometryDescriptor to check from. * @return True if its valid */ public boolean isGeomTypeValid(GeometryDescriptor geomDescriptor) { String geomType = geomDescriptor.getType().getName().getLocalPart().toLowerCase(); if (geomType.equals("geometry")) { return true; } else if (geomType.equals("point")) { return true; } else if (geomType.equals("linestring")) { return true; } else if (geomType.equals("polygon")) { return true; } else if (geomType.equals("multipoint")) { return true; } else if (geomType.equals("multilinestring")) { return true; } else if (geomType.equals("multipolygon")) { return true; } else if (geomType.equals("geomcollection")) { return true; } else { return false; } } /** Encode basic Java types to those permissible in a GeoPackage * * @param object The object value to decode * @return A String usable for a table definition data type. Defaults to TEXT for * any unknown class or Object */ public static String encodeType(Class<?> clazz) { String name = clazz.getSimpleName().toLowerCase(); if (name.equals("integer") || name.equals("int")) { return "INTEGER"; } else if (name.equals("string")) { return "TEXT"; } else if (name.equals("boolean") || name.equals("byte")) { return "BOOL"; } else if (name.equals("double") || name.equals("float")) { return "REAL"; } else if (name.equals("long")) { return "INTEGER"; } else if (name.equals("geometry") || name.equals("byte[]")) { return "BLOB"; } return "TEXT"; } /** Decode SQLite data types to Java classes * * @param sqlType * @return */ public Class<?> decodeType(String sqlType) { JavaType jType = sqlTypeMap.get(sqlType.toLowerCase()); if (jType==null || jType==JavaType.UNKNOWN) throw new IllegalArgumentException("Unknown SQL data type '"+sqlType+"'"); switch (jType) { case INTEGER: return Integer.class; case STRING: return String.class; case BOOLEAN: return Boolean.class; case FLOAT: return Float.class; case DOUBLE: return Double.class; case BYTE_ARR: return Byte[].class; } return String.class; } }
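The API dump above is dense, so here is a minimal, hedged usage sketch (illustrative only, not part of the repository). It assumes the caller supplies a platform-specific ISQLDatabase implementation (for example an Android SQLite wrapper), the table names are placeholders, and the BoundingBoxImpl argument order (minX, maxX, minY, maxY, crs) simply mirrors the calls made inside GeoPackage.java itself.

import java.util.List;
import org.opengis.feature.simple.SimpleFeature;
import org.opengis.geometry.BoundingBox;
import com.augtech.geoapi.geometry.BoundingBoxImpl;
import com.augtech.geoapi.geopackage.GeoPackage;
import com.augtech.geoapi.geopackage.ISQLDatabase;
import com.augtech.geoapi.referncing.CoordinateReferenceSystemImpl;

public class GeoPackageUsageSketch {

    /** Connects to an existing .gpkg file and reads some data.
     *  The ISQLDatabase implementation is platform specific and supplied by the caller. */
    public static void readExample(ISQLDatabase db) throws Exception {

        // Connect without overwriting; the constructor validates the file and its system tables
        GeoPackage gpkg = new GeoPackage(db, false);

        // Query features intersecting a WGS84 box; the box SRS must match the table SRS
        BoundingBox bbox = new BoundingBoxImpl(-1.0, 1.0, 51.0, 52.0,
                new CoordinateReferenceSystemImpl("4326"));
        List<SimpleFeature> feats = gpkg.getFeatures("my_feature_table", bbox);

        // Fetch a single raster tile by column, row and zoom level
        byte[] tileData = gpkg.getTile("my_tile_table", 2023, 1347, 12);

        gpkg.close();
    }
}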
Fixed bug inserting features. When features were being inserted and an attribute didn't exist, the value insertion was skipped rather than inserting Null, causing values to go into the wrong fields.
AugTech_GeoAPI_Impl/com/augtech/geoapi/geopackage/GeoPackage.java
Fixed bug inserting features
<ide><path>ugTech_GeoAPI_Impl/com/augtech/geoapi/geopackage/GeoPackage.java <ide> * If there are multiple feature types within the collection they are <ide> * automatically split to their corresponding tables. <ide> * The table name to insert into is taken from the local part of <del> * the {@link FeatureType#getName()}. <add> * the {@link FeatureType#getName()}.<p> <add> * The relevant tables must already exist in the GeoPackage. <ide> * <ide> * @param features <ide> * @return The number of records inserted <ide> <ide> /* Features within the collection could be different types, so split <ide> * in to seperate lists for batch insertion */ <del> Map<Name, List<SimpleFeature>> typeList = new HashMap<Name, List<SimpleFeature>>(); <add> Map<Name, List<SimpleFeature>> sfByType = new HashMap<Name, List<SimpleFeature>>(); <ide> for (SimpleFeature sf : features) { <ide> Name tName = sf.getType().getName(); <del> List<SimpleFeature> thisType = typeList.get(tName); <add> List<SimpleFeature> thisType = sfByType.get(tName); <ide> <ide> if (thisType==null) { <ide> thisType = new ArrayList<SimpleFeature>(); <del> typeList.put(tName, thisType); <add> sfByType.put(tName, thisType); <ide> } <ide> thisType.add(sf); <ide> <ide> FeaturesTable featTable = null; <ide> <ide> // For each set of feature's in our individual lists.. <del> for (Map.Entry<Name, List<SimpleFeature>> e : typeList.entrySet()) { <add> for (Map.Entry<Name, List<SimpleFeature>> e : sfByType.entrySet()) { <ide> <ide> featTable = (FeaturesTable)getUserTable( <ide> e.getKey().getLocalPart(), GpkgTable.TABLE_TYPE_FEATURES ); <ide> insertVals = null; <ide> } <ide> <del> typeList = null; <add> sfByType = null; <ide> <ide> if (numInserted>0) updateLastChange(featTable.getTableName(), featTable.getTableType()); <ide> <ide> value = feature.getID(); <ide> } else { <ide> int idx = type.indexOf( field.getFieldName() ); <del> //This field isn't defined on the feature type, so can't insert value <del> if (idx==-1 || idx > type.getAttributeCount()) continue; <del> <del> value = feature.getAttribute(idx); <add> /* If the field is not available on the type, set to null to ensure <add> * the value list matches the table definition */ <add> if (idx==-1 || idx > type.getAttributeCount()) { <add> value = null; <add> } else { <add> value = feature.getAttribute(idx); <add> } <ide> } <ide> <ide> passConstraint = true;
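To make the effect of this change concrete, the following self-contained sketch (illustrative only, not taken from the repository) mimics the corrected attribute-collection loop: an attribute that is not defined on the feature type now contributes an explicit null instead of being skipped, so the value list stays aligned with the table's column definitions.

import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class NullPaddingSketch {

    /** Mimics the corrected loop in buildInsertValues(): every table field receives an
     *  entry, with null standing in for attributes the feature type does not define. */
    static Map<String, Object> collect(List<String> tableFields, Map<String, Object> featureAttrs) {
        Map<String, Object> values = new HashMap<String, Object>();
        for (String field : tableFields) {
            // Before the fix an undefined attribute was skipped entirely; now it is
            // written as null so the insert statement stays column-aligned.
            values.put(field, featureAttrs.containsKey(field) ? featureAttrs.get(field) : null);
        }
        return values;
    }

    public static void main(String[] args) {
        Map<String, Object> attrs = new HashMap<String, Object>();
        attrs.put("name", "pipe.1234");
        attrs.put("diameter", 150);
        // "material" is not defined on this feature, but still appears (as null) in the result
        System.out.println(collect(Arrays.asList("name", "diameter", "material"), attrs));
    }
}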
Java
apache-2.0
ba2ba2f1d00d12f1fcaac6454eea1292a1e4b318
0
spring-cloud/spring-cloud-dataflow-admin-cloudfoundry,spring-cloud/spring-cloud-dataflow-admin-cloudfoundry,spring-cloud/spring-cloud-dataflow-server-cloudfoundry,spring-cloud/spring-cloud-dataflow-admin-cloudfoundry,spring-cloud/spring-cloud-dataflow-server-cloudfoundry,spring-cloud/spring-cloud-dataflow-server-cloudfoundry
/* * Copyright 2016 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.dataflow.server.cloudfoundry.resource; import java.io.File; import java.io.IOException; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.Map; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.cloud.deployer.resource.support.DelegatingResourceLoader; import org.springframework.core.io.Resource; import org.springframework.core.io.ResourceLoader; import org.springframework.util.Assert; import org.springframework.util.FileSystemUtils; /** * A wrapper around a {@link org.springframework.core.io.ResourceLoader} that deletes returned Resources (assumed to * be on the file system) once disk space is getting low. Least Recently Used entries are removed first. * * <p>This wrapper is typically meant to be used to clean Maven {@literal .m2/repository} entries, but also works * with other files. For the former case, if entries are under the configured {@link #repositoryCache} path (typically * the {@literal .m2/repository} path), then the whole parent directory of the resource is removed. Otherwise, the sole * resource file is deleted.</p> * * @author Eric Bottard */ // NOTE: extends DelegatingResourceLoader as a // work around https://github.com/spring-cloud/spring-cloud-dataflow/issues/1064 for now /*default*/ class LRUCleaningResourceLoader extends DelegatingResourceLoader { private static final Logger logger = LoggerFactory.getLogger(LRUCleaningResourceLoader.class); private final File repositoryCache; private final ResourceLoader delegate; private final Map<File, Void> lruCache = this.new LRUCache(); private final float targetFreeSpaceRatio; /** * Instantiates a new LRUCleaning resource loader. * @param delegate the ResourceLoader to wrap, assumed to be file system based. * @param targetFreeSpaceRatio The target free disk space ratio, between 0 and 1. * @param repositoryCache The directory location of the maven cache. */ public LRUCleaningResourceLoader(ResourceLoader delegate, float targetFreeSpaceRatio, File repositoryCache) { Assert.notNull(delegate, "delegate cannot be null"); Assert.isTrue(0 <= targetFreeSpaceRatio && targetFreeSpaceRatio <= 1, "targetFreeSpaceRatio should between [0,1] inclusive."); this.delegate = delegate; this.targetFreeSpaceRatio = targetFreeSpaceRatio; this.repositoryCache = repositoryCache; } @Override public Resource getResource(String location) { Resource resource = delegate.getResource(location); try { File file = resource.getFile(); synchronized (lruCache) { lruCache.put(file, null); } return resource; } catch (IOException e) { throw new RuntimeException(getClass().getSimpleName() + " is meant to work with File resolvable Resources. 
Exception trying to resolve " + location, e); } } @Override public ClassLoader getClassLoader() { return delegate.getClassLoader(); } private class LRUCache extends LinkedHashMap<File, Void> { LRUCache() { super(5, .75f, true); } @Override protected boolean removeEldestEntry(Map.Entry<File, Void> eldest) { for (Iterator<File> it = keySet().iterator(); it.hasNext(); ) { File file = it.next(); logger.info("Looking at {}, {} / {} = {}% free space", file, file.getFreeSpace(), file.getTotalSpace(), 100f * file.getFreeSpace() / file.getTotalSpace()); if (shouldDelete(file) && it.hasNext()) { // never delete the most recent entry cleanup(file); it.remove(); } } return false; // We already did some cleanup, don't let superclass do its logic } private void cleanup(File file) { if (repositoryCache != null && file.getPath().startsWith(repositoryCache.getPath())) { boolean success = FileSystemUtils.deleteRecursively(file.getParentFile()); logger.debug("[{}] Deleting {} parent directory to regain free space {}", success ? "SUCCESS" : "FAILED", file); } else { boolean success = file.delete(); logger.debug("[{}] Deleting {} to regain free space", success ? "SUCCESS" : "FAILED", file); } } } private boolean shouldDelete(File file) { return ((float) file.getFreeSpace()) / file.getTotalSpace() < targetFreeSpaceRatio; } }
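A brief usage sketch for the class above, assuming calling code in the same package (the class is deliberately package-private) and a plain file-system ResourceLoader as the delegate; the real auto-configuration wires in a Maven-aware delegate and the local .m2/repository path, so both values below are placeholders.

package org.springframework.cloud.dataflow.server.cloudfoundry.resource;

import java.io.File;
import org.springframework.core.io.DefaultResourceLoader;
import org.springframework.core.io.Resource;
import org.springframework.core.io.ResourceLoader;

class LRUCleaningResourceLoaderSketch {

    public static void main(String[] args) {
        // Placeholder for the local Maven cache directory
        File m2Cache = new File(System.getProperty("user.home"), ".m2/repository");

        // Any file-system backed ResourceLoader can act as the delegate
        ResourceLoader delegate = new DefaultResourceLoader();

        // Start evicting least-recently-used entries once free disk space drops below 10%
        LRUCleaningResourceLoader cleaning =
                new LRUCleaningResourceLoader(delegate, 0.10f, m2Cache);

        Resource r = cleaning.getResource("file:/tmp/some-artifact.jar");
        System.out.println(r.getFilename());
    }
}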
spring-cloud-dataflow-server-cloudfoundry-autoconfig/src/main/java/org/springframework/cloud/dataflow/server/cloudfoundry/resource/LRUCleaningResourceLoader.java
/* * Copyright 2016 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.dataflow.server.cloudfoundry.resource; import java.io.File; import java.io.IOException; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.Map; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.cloud.deployer.resource.support.DelegatingResourceLoader; import org.springframework.core.io.Resource; import org.springframework.core.io.ResourceLoader; import org.springframework.util.Assert; import org.springframework.util.FileSystemUtils; /** * A wrapper around a {@link org.springframework.core.io.ResourceLoader} that deletes returned Resources (assumed to * be on the file system) once disk space is getting low. Least Recently Used entries are removed first. * * <p>This wrapper is typically meant to be used to clean Maven {@literal .m2/repository} entries, but also works * with other files. For the former case, if entries are under the configured {@link #repositoryCache} path (typically * the {@literal .m2/repository} path), then the whole parent directory of the resource is removed. Otherwise, the sole * resource file is deleted.</p> * * @author Eric Bottard */ // NOTE: extends DelegatingResourceLoader as a // work around https://github.com/spring-cloud/spring-cloud-dataflow/issues/1064 for now /*default*/ class LRUCleaningResourceLoader extends DelegatingResourceLoader { private static final Logger logger = LoggerFactory.getLogger(LRUCleaningResourceLoader.class); private final File repositoryCache; private final ResourceLoader delegate; private final Map<File, Void> lruCache = this.new LRUCache(); private final float targetFreeSpaceRatio; /** * Instantiates a new LRUCleaning resource loader. * @param delegate the ResourceLoader to wrap, assumed to be file system based. * @param targetFreeSpaceRatio The target free disk space ratio, between 0 and 1. * @param repositoryCache The directory location of the maven cache. */ public LRUCleaningResourceLoader(ResourceLoader delegate, float targetFreeSpaceRatio, File repositoryCache) { Assert.notNull(delegate, "delegate cannot be null"); Assert.isTrue(0 < targetFreeSpaceRatio && targetFreeSpaceRatio < 1, "targetFreeSpaceRatio should be between 0 and 1"); this.delegate = delegate; this.targetFreeSpaceRatio = targetFreeSpaceRatio; this.repositoryCache = repositoryCache; } @Override public Resource getResource(String location) { Resource resource = delegate.getResource(location); try { File file = resource.getFile(); synchronized (lruCache) { lruCache.put(file, null); } return resource; } catch (IOException e) { throw new RuntimeException(getClass().getSimpleName() + " is meant to work with File resolvable Resources. 
Exception trying to resolve " + location, e); } } @Override public ClassLoader getClassLoader() { return delegate.getClassLoader(); } private class LRUCache extends LinkedHashMap<File, Void> { LRUCache() { super(5, .75f, true); } @Override protected boolean removeEldestEntry(Map.Entry<File, Void> eldest) { for (Iterator<File> it = keySet().iterator(); it.hasNext(); ) { File file = it.next(); logger.info("Looking at {}, {} / {} = {}% free space", file, file.getFreeSpace(), file.getTotalSpace(), 100f * file.getFreeSpace() / file.getTotalSpace()); if (shouldDelete(file) && it.hasNext()) { // never delete the most recent entry cleanup(file); it.remove(); } } return false; // We already did some cleanup, don't let superclass do its logic } private void cleanup(File file) { if (repositoryCache != null && file.getPath().startsWith(repositoryCache.getPath())) { boolean success = FileSystemUtils.deleteRecursively(file.getParentFile()); logger.debug("[{}] Deleting {} parent directory to regain free space {}", success ? "SUCCESS" : "FAILED", file); } else { boolean success = file.delete(); logger.debug("[{}] Deleting {} to regain free space", success ? "SUCCESS" : "FAILED", file); } } } private boolean shouldDelete(File file) { return ((float) file.getFreeSpace()) / file.getTotalSpace() < targetFreeSpaceRatio; } }
Change assertion to be inclusive of 0 and 1 for free disk percentage. Fixes #244
spring-cloud-dataflow-server-cloudfoundry-autoconfig/src/main/java/org/springframework/cloud/dataflow/server/cloudfoundry/resource/LRUCleaningResourceLoader.java
Change assertion to be inclusive of 0 and 1 for free disk percentage
<ide><path>spring-cloud-dataflow-server-cloudfoundry-autoconfig/src/main/java/org/springframework/cloud/dataflow/server/cloudfoundry/resource/LRUCleaningResourceLoader.java <ide> */ <ide> public LRUCleaningResourceLoader(ResourceLoader delegate, float targetFreeSpaceRatio, File repositoryCache) { <ide> Assert.notNull(delegate, "delegate cannot be null"); <del> Assert.isTrue(0 < targetFreeSpaceRatio && targetFreeSpaceRatio < 1, "targetFreeSpaceRatio should be between 0 and 1"); <add> Assert.isTrue(0 <= targetFreeSpaceRatio && targetFreeSpaceRatio <= 1, "targetFreeSpaceRatio should between [0,1] inclusive."); <ide> this.delegate = delegate; <ide> this.targetFreeSpaceRatio = targetFreeSpaceRatio; <ide> this.repositoryCache = repositoryCache;
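A minimal usage sketch, not part of the commit above, showing what the relaxed assertion now accepts: with inclusive bounds, the boundary ratios 0 and 1 pass, which given shouldDelete() amount to "never clean" and "clean as eagerly as possible". The DefaultResourceLoader delegate and the Maven cache path are illustrative assumptions, and because the class is package-private the sketch assumes it lives in the same package.

package org.springframework.cloud.dataflow.server.cloudfoundry.resource;

import java.io.File;

import org.springframework.core.io.DefaultResourceLoader;
import org.springframework.core.io.ResourceLoader;

class TargetFreeSpaceRatioSketch {

    public static void main(String[] args) {
        // Illustrative assumptions: a plain file-system delegate and the default Maven cache location.
        ResourceLoader delegate = new DefaultResourceLoader();
        File m2Cache = new File(System.getProperty("user.home"), ".m2/repository");

        // Both of these were rejected before the change (0 < ratio < 1 was required) and pass after it:
        // a ratio of 0 means shouldDelete() never fires, a ratio of 1 makes cleanup run whenever any
        // disk space is in use (the most recently used entry is still never deleted).
        LRUCleaningResourceLoader neverCleans = new LRUCleaningResourceLoader(delegate, 0.0f, m2Cache);
        LRUCleaningResourceLoader cleansEagerly = new LRUCleaningResourceLoader(delegate, 1.0f, m2Cache);
    }
}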
Java
apache-2.0
cd86fbf2b2a1ebb174f9984ab946b12e94f2ea81
0
kduretec/TestDataGenerator
package benchmarkdp.datagenerator.app; import java.io.File; import java.io.IOException; import org.apache.commons.io.FileUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import benchmarkdp.datagenerator.generator.utils.ZipUtil; import benchmarkdp.datagenerator.properties.ExperimentProperties; import benchmarkdp.datagenerator.testcase.TestCaseContainer; import benchmarkdp.datagenerator.workflow.IWorkflowStep; public class ToolEvaluatorStep implements IWorkflowStep { private static Logger log = LoggerFactory.getLogger(ToolEvaluatorStep.class); private static String COM_FOLDER_TO = "/Users/kresimir/Mount/Hephaistos/Experiments/TaskIn"; private static String COM_FOLDER_FROM = "/Users/kresimir/Mount/Hephaistos/Experiments/TaskOut"; @Override public void executeStep(ExperimentProperties ep, TestCaseContainer tCC) { log.info("Tool Evaluator step"); if (ep.getExperimentState().compareTo("TEST_CASES_FINALIZED") == 0) { copyToEvaluation(ep, tCC); } else if (ep.getExperimentState().compareTo("TEST_CASES_SEND_TO_EVALUATION") == 0) { } } @Override public IWorkflowStep nextStep() { // TODO Auto-generated method stub return null; } private void copyToEvaluation(ExperimentProperties ep, TestCaseContainer tCC) { log.info("Sending documents to evaluation"); try { String experimentName = ep.getExperimentName(); String pathTmp = ep.getFullFolderPath() + "/tmp/" + experimentName; File f = new File(pathTmp); if (!f.exists()) { f.mkdirs(); } String zipFolder = pathTmp; pathTmp = pathTmp + "/" + experimentName; File sDocs = new File(ep.getFullFolderPath() + ep.getDocumentFolder()); File dDocs = new File(pathTmp + "/Documents"); FileUtils.copyDirectory(sDocs, dDocs); File sText = new File(ep.getFullFolderPath() + ep.getTextFolder()); File dText = new File(pathTmp + "/GroundTruth/Text"); FileUtils.copyDirectory(sText, dText); File sMet = new File(ep.getFullFolderPath() + ep.getMetadataFolder()); File dMet = new File(pathTmp + "/GroundTruth/Metadata"); FileUtils.copyDirectory(sMet, dMet); File propFile = new File (ep.getFullFolderPath() + "properties.xml"); FileUtils.copyFileToDirectory(propFile, f); File tcFile = new File (ep.getFullFolderPath() + "testCases.xml"); FileUtils.copyFileToDirectory(tcFile, f); ZipUtil.zipFolder(zipFolder, COM_FOLDER_TO, ep.getExperimentName()); //FileUtils.deleteDirectory(new File(ep.getFullFolderPath() + "/tmp")); } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } } }
DataGenerator/src/benchmarkdp/datagenerator/app/ToolEvaluatorStep.java
package benchmarkdp.datagenerator.app; import java.io.File; import java.io.IOException; import org.apache.commons.io.FileUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import benchmarkdp.datagenerator.generator.utils.ZipUtil; import benchmarkdp.datagenerator.properties.ExperimentProperties; import benchmarkdp.datagenerator.testcase.TestCaseContainer; import benchmarkdp.datagenerator.workflow.IWorkflowStep; public class ToolEvaluatorStep implements IWorkflowStep { private static Logger log = LoggerFactory.getLogger(ToolEvaluatorStep.class); private static String COM_FOLDER_TO = "/Users/kresimir/Mount/Hephaistos/Experiments/TaskIn"; private static String COM_FOLDER_FROM = "/Users/kresimir/Mount/Hephaistos/Experiments/TaskOut"; @Override public void executeStep(ExperimentProperties ep, TestCaseContainer tCC) { log.info("Tool Evaluator step"); if (ep.getExperimentState().compareTo("TEST_CASES_FINALIZED") == 0) { copyToEvaluation(ep, tCC); } else if (ep.getExperimentState().compareTo("TEST_CASES_SEND_TO_EVALUATION") == 0) { } } @Override public IWorkflowStep nextStep() { // TODO Auto-generated method stub return null; } private void copyToEvaluation(ExperimentProperties ep, TestCaseContainer tCC) { log.info("Sending documents to evaluation"); try { String experimentName = ep.getExperimentName(); String pathTmp = ep.getFullFolderPath() + "/tmp/" + experimentName; File f = new File(pathTmp); if (!f.exists()) { f.mkdirs(); } File sDocs = new File(ep.getFullFolderPath() + ep.getDocumentFolder()); File dDocs = new File(pathTmp + "/Documents"); FileUtils.copyDirectory(sDocs, dDocs); File sText = new File(ep.getFullFolderPath() + ep.getTextFolder()); File dText = new File(pathTmp + "/GroundTruth/Text"); FileUtils.copyDirectory(sText, dText); File sMet = new File(ep.getFullFolderPath() + ep.getMetadataFolder()); File dMet = new File(pathTmp + "/GroundTruth/Metadata"); FileUtils.copyDirectory(sMet, dMet); File propFile = new File (ep.getFullFolderPath() + "properties.xml"); FileUtils.copyFileToDirectory(propFile, f); File tcFile = new File (ep.getFullFolderPath() + "testCases.xml"); FileUtils.copyFileToDirectory(tcFile, f); ZipUtil.zipFolder(pathTmp, COM_FOLDER_TO, ep.getExperimentName()); FileUtils.deleteDirectory(new File(ep.getFullFolderPath() + "/tmp")); } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } } }
Nest evaluation artifacts one level deeper under the experiment name, zip from the parent tmp experiment folder, and keep the tmp directory (cleanup call commented out)
DataGenerator/src/benchmarkdp/datagenerator/app/ToolEvaluatorStep.java
Nest evaluation artifacts under the experiment name and zip from the parent tmp folder
<ide><path>DataGenerator/src/benchmarkdp/datagenerator/app/ToolEvaluatorStep.java <ide> if (!f.exists()) { <ide> f.mkdirs(); <ide> } <add> String zipFolder = pathTmp; <add> pathTmp = pathTmp + "/" + experimentName; <ide> File sDocs = new File(ep.getFullFolderPath() + ep.getDocumentFolder()); <ide> File dDocs = new File(pathTmp + "/Documents"); <ide> FileUtils.copyDirectory(sDocs, dDocs); <ide> File tcFile = new File (ep.getFullFolderPath() + "testCases.xml"); <ide> FileUtils.copyFileToDirectory(tcFile, f); <ide> <del> ZipUtil.zipFolder(pathTmp, COM_FOLDER_TO, ep.getExperimentName()); <del> FileUtils.deleteDirectory(new File(ep.getFullFolderPath() + "/tmp")); <add> ZipUtil.zipFolder(zipFolder, COM_FOLDER_TO, ep.getExperimentName()); <add> //FileUtils.deleteDirectory(new File(ep.getFullFolderPath() + "/tmp")); <ide> <ide> } catch (IOException e) { <ide> // TODO Auto-generated catch block
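An illustrative sketch of the directory nesting this commit introduces; the folder path and experiment name are invented for the example. Artifacts are now copied one level deeper, under an extra experiment-name directory, while the zip is built from the parent folder, so the archive gains a top-level directory named after the experiment.

public class EvaluationLayoutSketch {

    public static void main(String[] args) {
        // Assumed stand-ins for ep.getFullFolderPath() and ep.getExperimentName().
        String fullFolderPath = "/data/experiments";
        String experimentName = "exp-01";

        String pathTmp = fullFolderPath + "/tmp/" + experimentName; // /data/experiments/tmp/exp-01
        String zipFolder = pathTmp;                                  // folder handed to ZipUtil.zipFolder(...)
        pathTmp = pathTmp + "/" + experimentName;                    // extra nesting level added by the change

        // Copy targets now sit inside the nested experiment folder.
        System.out.println(pathTmp + "/Documents");
        System.out.println(pathTmp + "/GroundTruth/Text");
        System.out.println(pathTmp + "/GroundTruth/Metadata");
        System.out.println("zip is created from: " + zipFolder);
    }
}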
JavaScript
bsd-2-clause
268e74b13ed5ea4c330a367b9f429b74c7d12dcd
0
t2ym/thin-polymer,t2ym/thin-polymer
/* @license https://github.com/t2ym/thin-polymer/blob/master/LICENSE.md Copyright (c) 2016, Tetsuya Mori <[email protected]>. All rights reserved. */ (function () { function UncamelCase (name) { return name // insert a hyphen between lower & upper .replace(/([a-z0-9])([A-Z])/g, '$1 $2') // space before last upper in a sequence followed by lower .replace(/\b([A-Z]+)([A-Z])([a-z0-9])/, '$1 $2$3') // replace spaces with hyphens .replace(/ /g, '-') // lowercase .toLowerCase(); } function functionName (func) { return typeof func === 'function' ? func.toString().replace(/^[\S\s]*?function\s*/, "").replace(/[\s\(\/][\S\s]+$/, "") : undefined; } if (!window.Prototype) { Object.defineProperty(window, 'Prototype', { get: function () { return function (id) { return this.PolymerElements ? this.PolymerElements[id] : undefined; }; }, set: function (proto) { /* Patterns: a) template id {} b) template id {is} c) document.template id {is} d) template {is} e) {is} f) class Is {template} g) {is,template} */ var id; var classId; var obj; var name = proto.name || functionName(proto); var current; // currentScript var template = null; var previous; // previousSibling template var cousin; // dom5.serialize output support current = Polymer.Settings.useNativeImports ? document.currentScript : document._currentScript; previous = current.previousSibling; while (previous && !previous.tagName) { previous = previous.previousSibling; } if (previous && previous.tagName !== 'template'.toUpperCase()) { previous = null; } if (!previous) { // search for cousin template if (current.parentNode.tagName === 'body'.toUpperCase()) { previous = current.parentNode.previousSibling; while (previous && !previous.tagName) { previous = previous.previousSibling; } if (previous && previous.tagName.toLowerCase() === 'head') { for (var i = 0; i < previous.childNodes.length; i++) { if (previous.childNodes[i].tagName === 'template'.toUpperCase()) { cousin = previous.childNodes[i]; break; } } } } if (cousin) { previous = cousin; } else { previous = null; } } if (!proto.is && (!name || name === 'class' || name === 'Prototype')) { if (previous) { id = previous.id; if (id) { // Pattern a) template = previous; proto.is = id; } } } else { if (proto.is) { id = proto.is; } else if (typeof proto === 'function' && name) { // ES6 class id = UncamelCase(name); classId = name; obj = new proto(); if (obj.template) { // Pattern f) template = document.createElement('template'); template.innerHTML = obj.template; var children = Array.prototype.filter.call(template.content.childNodes, function (node) { return node.tagName; }); var topChild = children.length === 1 ? 
children[0] : undefined; if (topChild && topChild.tagName.toLowerCase() === 'template') { template = topChild; } } obj.is = id; Object.getOwnPropertyNames(obj.__proto__).forEach(function (prop) { obj[prop] = obj.__proto__[prop]; }); proto = obj; } if (!template && proto.template) { // Pattern g) template = document.createElement('template'); template.innerHTML = proto.template; } if (!template) { // Pattern b), c) template = current.ownerDocument .querySelector('template[id=' + id + ']') || document.querySelector('template[id=' + id + ']'); } if (!template && previous && !previous.id) { // Pattern d) template = previous; template.id = id; } else { // Pattern e) } } if (!id) { throw 'Custom element name is not defined'; } // register dom-module if (template) { var domModule = document.createElement('dom-module'); var assetUrl = new URL(current.baseURI || window.currentImport.baseURI); domModule.appendChild(template); domModule.setAttribute('assetpath', assetUrl.pathname.indexOf('.vulcanized.') < 0 ? assetUrl.pathname : template.hasAttribute('assetpath') ? template.getAttribute('assetpath') : assetUrl.pathname); domModule.register(id); } // register Polymer element this.PolymerElements = this.PolymerElements || {}; classId = classId || id.split('-').map(function (word) { return word[0].toUpperCase() + word.substr(1); }).join(''); var PrototypeGeneratorName = 'Polymer'; // to pass jshint if (this.PolymerElements[id]) { console.warn('Discarding duplicate registration of custom element ' + id); } else { this.PolymerElements[id] = window[PrototypeGeneratorName](proto); // to pass strip this.PolymerElements[classId] = this.PolymerElements[id]; } return this.PolymerElements[id]; } }); } })();
thin-polymer.js
/* @license https://github.com/t2ym/thin-polymer/blob/master/LICENSE.md Copyright (c) 2016, Tetsuya Mori <[email protected]>. All rights reserved. */ (function () { function UncamelCase (name) { return name // insert a hyphen between lower & upper .replace(/([a-z0-9])([A-Z])/g, '$1 $2') // space before last upper in a sequence followed by lower .replace(/\b([A-Z]+)([A-Z])([a-z0-9])/, '$1 $2$3') // replace spaces with hyphens .replace(/ /g, '-') // lowercase .toLowerCase(); } function functionName (func) { return typeof func === 'function' ? func.toString().replace(/^[\S\s]*?function\s*/, "").replace(/[\s\(\/][\S\s]+$/, "") : undefined; } if (!window.Prototype) { Object.defineProperty(window, 'Prototype', { get: function () { return function (id) { return this.PolymerElements ? this.PolymerElements[id] : undefined; }; }, set: function (proto) { /* Patterns: a) template id {} b) template id {is} c) document.template id {is} d) template {is} e) {is} f) class Is {template} g) {is,template} */ var id; var classId; var name = proto.name || functionName(proto); var current; // currentScript var template = null; var previous; // previousSibling template var cousin; // dom5.serialize output support current = Polymer.Settings.useNativeImports ? document.currentScript : document._currentScript; previous = current.previousSibling; while (previous && !previous.tagName) { previous = previous.previousSibling; } if (previous && previous.tagName !== 'template'.toUpperCase()) { previous = null; } if (!previous) { // search for cousin template if (current.parentNode.tagName === 'body'.toUpperCase()) { previous = current.parentNode.previousSibling; while (previous && !previous.tagName) { previous = previous.previousSibling; } if (previous && previous.tagName.toLowerCase() === 'head') { for (var i = 0; i < previous.childNodes.length; i++) { if (previous.childNodes[i].tagName === 'template'.toUpperCase()) { cousin = previous.childNodes[i]; break; } } } } if (cousin) { previous = cousin; } else { previous = null; } } if (!proto.is && !name) { if (previous) { id = previous.id; if (id) { // Pattern a) template = previous; proto.is = id; } } } else { if (proto.is) { id = proto.is; } else if (typeof proto === 'function' && name) { // ES6 class id = UncamelCase(name); classId = name; proto = new proto(); if (proto.template) { // Pattern f) template = document.createElement('template'); template.innerHTML = proto.template; var children = Array.prototype.filter.call(template.content.childNodes, function (node) { return node.tagName; }); var topChild = children.length === 1 ? children[0] : undefined; if (topChild && topChild.tagName.toLowerCase() === 'template') { template = topChild; } } proto = proto.__proto__; proto.is = id; } if (!template && proto.template) { // Pattern g) template = document.createElement('template'); template.innerHTML = proto.template; } if (!template) { // Pattern b), c) template = current.ownerDocument .querySelector('template[id=' + id + ']') || document.querySelector('template[id=' + id + ']'); } if (!template && previous && !previous.id) { // Pattern d) template = previous; template.id = id; } else { // Pattern e) } } if (!id) { throw 'Custom element name is not defined'; } // register dom-module if (template) { var domModule = document.createElement('dom-module'); var assetUrl = new URL(current.baseURI || window.currentImport.baseURI); domModule.appendChild(template); domModule.setAttribute('assetpath', assetUrl.pathname.indexOf('.vulcanized.') < 0 ? 
assetUrl.pathname : template.hasAttribute('assetpath') ? template.getAttribute('assetpath') : assetUrl.pathname); domModule.register(id); } // register Polymer element this.PolymerElements = this.PolymerElements || {}; classId = classId || id.split('-').map(function (word) { return word[0].toUpperCase() + word.substr(1); }).join(''); var PrototypeGeneratorName = 'Polymer'; // to pass jshint if (this.PolymerElements[id]) { console.warn('Discarding duplicate regitration of custom element ' + id); } else { this.PolymerElements[id] = window[PrototypeGeneratorName](proto); // to pass strip this.PolymerElements[classId] = this.PolymerElements[id]; } return this.PolymerElements[id]; } }); } })();
Copy properties of prototype
thin-polymer.js
Copy properties of prototype
<ide><path>thin-polymer.js <ide> */ <ide> var id; <ide> var classId; <add> var obj; <ide> var name = proto.name || functionName(proto); <ide> var current; // currentScript <ide> var template = null; <ide> } <ide> } <ide> <del> if (!proto.is && !name) { <add> if (!proto.is && (!name || name === 'class' || name === 'Prototype')) { <ide> if (previous) { <ide> id = previous.id; <ide> if (id) { <ide> // ES6 class <ide> id = UncamelCase(name); <ide> classId = name; <del> proto = new proto(); <del> if (proto.template) { <add> obj = new proto(); <add> if (obj.template) { <ide> // Pattern f) <ide> template = document.createElement('template'); <del> template.innerHTML = proto.template; <add> template.innerHTML = obj.template; <ide> var children = Array.prototype.filter.call(template.content.childNodes, <ide> function (node) { return node.tagName; }); <ide> var topChild = children.length === 1 ? children[0] : undefined; <ide> template = topChild; <ide> } <ide> } <del> proto = proto.__proto__; <del> proto.is = id; <add> obj.is = id; <add> Object.getOwnPropertyNames(obj.__proto__).forEach(function (prop) { <add> obj[prop] = obj.__proto__[prop]; <add> }); <add> proto = obj; <ide> } <ide> if (!template && proto.template) { <ide> // Pattern g)
Java
apache-2.0
c04ae392e81e5ad840cf4f5295d0d31cf6377ef2
0
mybatis/guice,hazendaz/guice,johnzeringue/guice,WilliamRen/guice
/* * Copyright 2010 The myBatis Team * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.mybatis.guice; import static org.mybatis.guice.iterables.Iterables.foreach; import java.util.Arrays; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashSet; import java.util.Map; import java.util.Set; import java.util.Map.Entry; import javax.sql.DataSource; import org.apache.ibatis.io.ResolverUtil; import org.apache.ibatis.mapping.Environment; import org.apache.ibatis.plugin.Interceptor; import org.apache.ibatis.reflection.factory.DefaultObjectFactory; import org.apache.ibatis.reflection.factory.ObjectFactory; import org.apache.ibatis.session.Configuration; import org.apache.ibatis.session.SqlSessionFactory; import org.apache.ibatis.transaction.TransactionFactory; import org.apache.ibatis.transaction.jdbc.JdbcTransactionFactory; import org.apache.ibatis.type.TypeHandler; import org.mybatis.guice.configuration.ConfigurationProvider; import org.mybatis.guice.configuration.Mappers; import org.mybatis.guice.configuration.TypeAliases; import org.mybatis.guice.datasource.builtin.UnpooledDataSourceProvider; import org.mybatis.guice.environment.EnvironmentProvider; import org.mybatis.guice.iterables.Each; import org.mybatis.guice.session.SqlSessionFactoryProvider; import com.google.inject.Module; import com.google.inject.Provider; import com.google.inject.Scopes; import com.google.inject.TypeLiteral; import com.google.inject.multibindings.MapBinder; import com.google.inject.multibindings.Multibinder; /** * Easy to use helper Module that alleviates users to write the boilerplate * google-guice bindings to create the SqlSessionFactory. * * @version $Id$ */ public final class MyBatisModule extends AbstractMyBatisModule { /** * The DataSource Provider class reference. */ private final Class<? extends Provider<DataSource>> dataSourceProviderType; /** * The TransactionFactory class reference. */ private final Class<? extends TransactionFactory> transactionFactoryType; /** * The user defined aliases. */ private final Map<String, Class<?>> aliases; /** * The user defined type handlers. */ private final Map<Class<?>, Class<? extends TypeHandler>> handlers; /** * The user defined Interceptor classes. */ private final Set<Class<? extends Interceptor>> interceptorsClasses; /** * The ObjectFactory class reference. */ private Class<? extends ObjectFactory> objectFactoryType; /** * The user defined mapper classes. */ private final Set<Class<?>> mapperClasses; /** * Creates a new module that binds all the needed modules to create the * SqlSessionFactory, injecting all the required components. * * @param dataSourceProviderType the DataSource Provider class reference. * @param transactionFactoryType the TransactionFactory class reference. * @param aliases the user defined aliases. * @param handlers the user defined type handlers. * @param interceptorsClasses the user defined Interceptor classes. * @param objectFactoryType the ObjectFactory class reference. 
* @param mapperClasses the user defined mapper classes. */ private MyBatisModule( Class<? extends Provider<DataSource>> dataSourceProviderType, Class<? extends TransactionFactory> transactionFactoryType, Map<String, Class<?>> aliases, Map<Class<?>, Class<? extends TypeHandler>> handlers, Set<Class<? extends Interceptor>> interceptorsClasses, Class<? extends ObjectFactory> objectFactoryType, Set<Class<?>> mapperClasses) { this.dataSourceProviderType = dataSourceProviderType; this.transactionFactoryType = transactionFactoryType; this.aliases = aliases; this.handlers = handlers; this.interceptorsClasses = interceptorsClasses; this.objectFactoryType = objectFactoryType; this.mapperClasses = mapperClasses; } /** * {@inheritDoc} */ @Override protected void configure() { super.configure(); // needed binding this.bind(DataSource.class).toProvider(this.dataSourceProviderType).in(Scopes.SINGLETON); this.bind(TransactionFactory.class).to(this.transactionFactoryType).in(Scopes.SINGLETON); this.bind(Environment.class).toProvider(EnvironmentProvider.class).in(Scopes.SINGLETON); this.bind(Configuration.class).toProvider(ConfigurationProvider.class).in(Scopes.SINGLETON); this.bind(ObjectFactory.class).to(this.objectFactoryType).in(Scopes.SINGLETON); this.bind(SqlSessionFactory.class).toProvider(SqlSessionFactoryProvider.class).in(Scopes.SINGLETON); // optional bindings // aliases if (!this.aliases.isEmpty()) { this.bind(new TypeLiteral<Map<String, Class<?>>>(){}).annotatedWith(TypeAliases.class).toInstance(this.aliases); } // type handlers foreach(this.handlers).handle(new Each<Map.Entry<Class<?>,Class<? extends TypeHandler>>>() { private MapBinder<Class<?>, TypeHandler> handlerBinder; public void doHandle(Entry<Class<?>, Class<? extends TypeHandler>> alias) { if (this.handlerBinder == null) { this.handlerBinder = MapBinder.newMapBinder(binder(), new TypeLiteral<Class<?>>(){}, new TypeLiteral<TypeHandler>(){}); } this.handlerBinder.addBinding(alias.getKey()).to(alias.getValue()).in(Scopes.SINGLETON); } }); // interceptors plugin foreach(this.interceptorsClasses).handle(new Each<Class<? extends Interceptor>>() { private Multibinder<Interceptor> interceptorsMultibinder; public void doHandle(Class<? extends Interceptor> interceptorType) { if (this.interceptorsMultibinder == null) { this.interceptorsMultibinder = Multibinder.newSetBinder(binder(), Interceptor.class); } this.interceptorsMultibinder.addBinding().to(interceptorType).in(Scopes.SINGLETON); } }); // mappers if (!this.mapperClasses.isEmpty()) { this.bind(new TypeLiteral<Set<Class<?>>>() {}).annotatedWith(Mappers.class).toInstance(this.mapperClasses); foreach(this.mapperClasses).handle(new EachMapper(this.binder())); } } /** * The {@link MyBatisModule} Builder. * * By default the Builder uses the following settings: * <ul> * <li>DataSource Provider type: {@link UnpooledDataSourceProvider};</li> * <li>TransactionFactory type: org.apache.ibatis.transaction.jdbc.JdbcTransactionFactory;</li> * <li>ObjectFactory type: org.apache.ibatis.reflection.factory.ObjectFactory.</li> * </ul> */ public static final class Builder { /** * The DataSource Provider class reference. */ private Class<? extends Provider<DataSource>> dataSourceProviderType = UnpooledDataSourceProvider.class; /** * The TransactionFactory class reference. */ private Class<? extends TransactionFactory> transactionFactoryType = JdbcTransactionFactory.class; /** * The user defined aliases. 
*/ private final Map<String, Class<?>> aliases = new HashMap<String, Class<?>>(); /** * The user defined type handlers. */ private final Map<Class<?>, Class<? extends TypeHandler>> handlers = new HashMap<Class<?>, Class<? extends TypeHandler>>(); /** * The user defined Interceptor classes. */ private final Set<Class<? extends Interceptor>> interceptorsClasses = new HashSet<Class<? extends Interceptor>>(); /** * The ObjectFactory Provider class reference. */ private Class<? extends ObjectFactory> objectFactoryType = DefaultObjectFactory.class; /** * The user defined mapper classes. */ private final Set<Class<?>> mapperClasses = new LinkedHashSet<Class<?>>(); /** * Set the DataSource Provider type has to be bound. * * @param dataSourceProviderType the DataSource Provider type. * @return this {@code Builder} instance. */ public Builder setDataSourceProviderType(Class<? extends Provider<DataSource>> dataSourceProviderType) { if (dataSourceProviderType == null) { throw new IllegalArgumentException("Parameter 'dataSourceProviderType' must be not null"); } this.dataSourceProviderType = dataSourceProviderType; return this; } /** * Set the TransactionFactory type has to be bound. * * @param transactionFactoryType the TransactionFactory type. * @return this {@code Builder} instance. */ public Builder setTransactionFactoryType(Class<? extends TransactionFactory> transactionFactoryType) { if (transactionFactoryType == null) { throw new IllegalArgumentException("Parameter 'transactionFactoryType' must be not null"); } this.transactionFactoryType = transactionFactoryType; return this; } /** * Adding simple aliases means that every specified class will be bound * using the simple class name, i.e. {@code com.acme.Foo} becomes {@code Foo}. * * @param types the specified types have to be bind. * @return this {@code Builder} instance. */ public Builder addSimpleAliases(final Class<?>...types) { if (types != null) { return this.addSimpleAliases(Arrays.asList(types)); } return this; } /** * Adding simple aliases means that every specified class will be bound * using the simple class name, i.e. {@code com.acme.Foo} becomes {@code Foo}. * * @param types the specified types have to be bind. * @return this {@code Builder} instance. */ public Builder addSimpleAliases(final Collection<Class<?>> types) { foreach(types).handle(new Each<Class<?>>() { public void doHandle(Class<?> clazz) { addAlias(clazz.getSimpleName(), clazz); } }); return this; } /** * Adds all Classes in the given package as a simple alias. * Adding simple aliases means that every specified class will be bound * using the simple class name, i.e. {@code com.acme.Foo} becomes {@code Foo}. * * @param packageName the specified package to search for classes to alias. * @return this {@code Builder} instance. */ public Builder addSimpleAliases(final String packageName) { return this.addSimpleAliases(getClasses(packageName)); } /** * Adds all Classes in the given package as a simple alias. * Adding simple aliases means that every specified class will be bound * using the simple class name, i.e. {@code com.acme.Foo} becomes {@code Foo}. * * @param packageName the specified package to search for classes to alias. * @param test a test to run against the objects found in the specified package. * @return this {@code Builder} instance. */ public Builder addSimpleAliases(final String packageName, final ResolverUtil.Test test) { return this.addSimpleAliases(getClasses(test, packageName)); } /** * Add a user defined binding. 
* * @param alias the string type alias * @param clazz the type has to be bound. */ public Builder addAlias(final String alias, final Class<?> clazz) { this.aliases.put(alias, clazz); return this; } /** * Add a user defined Type Handler letting google-guice creating it. * * @param type the specified type has to be handled. * @param handler the handler type. * @return this {@code Builder} instance. */ public Builder addTypeHandler(final Class<?> type, final Class<? extends TypeHandler> handler) { this.handlers.put(type, handler); return this; } /** * Adds the user defined myBatis interceptors plugins types, letting * google-guice creating them. * * @param interceptorsClasses the user defined MyBatis interceptors plugins types. * @return this {@code Builder} instance. * */ public Builder addInterceptorsClasses(Class<? extends Interceptor>...interceptorsClasses) { if (interceptorsClasses != null) { return this.addInterceptorsClasses(Arrays.asList(interceptorsClasses)); } return this; } /** * Adds the user defined MyBatis interceptors plugins types, letting * google-guice creating them. * * @param interceptorsClasses the user defined MyBatis Interceptors plugins types. * @return this {@code Builder} instance. * */ public Builder addInterceptorsClasses(Collection<Class<? extends Interceptor>> interceptorsClasses) { if (interceptorsClasses != null) { this.interceptorsClasses.addAll(interceptorsClasses); } return this; } /** * Adds the user defined MyBatis interceptors plugins types in the given package, * letting google-guice creating them. * * @param packageName the package where looking for Interceptors plugins types. * @return this {@code Builder} instance. */ public Builder addInterceptorsClasses(String packageName) { if (packageName == null) { throw new IllegalArgumentException("Parameter 'packageName' must be not null"); } return this.addInterceptorsClasses(new ResolverUtil<Interceptor>() .find(new ResolverUtil.IsA(Interceptor.class), packageName) .getClasses()); } /** * Sets the ObjectFactory class. * * @param objectFactoryType the ObjectFactory type. * @return this {@code Builder} instance. */ public Builder setObjectFactoryType(Class<? extends ObjectFactory> objectFactoryType) { if (objectFactoryType == null) { throw new IllegalArgumentException("Parameter 'objectFactoryType' must be not null"); } this.objectFactoryType = objectFactoryType; return this; } /** * Adds the user defined mapper classes. * * @param mapperClasses the user defined mapper classes. * @return this {@code Builder} instance. * */ public Builder addMapperClasses(Class<?>...mapperClasses) { if (mapperClasses != null) { return this.addMapperClasses(Arrays.asList(mapperClasses)); } return this; } /** * Adds the user defined mapper classes. * * @param mapperClasses the user defined mapper classes. * @return this {@code Builder} instance. * */ public Builder addMapperClasses(Collection<Class<?>> mapperClasses) { if (mapperClasses != null) { this.mapperClasses.addAll(mapperClasses); } return this; } /** * Adds the user defined mapper classes. * * @param packageName the specified package to search for mappers to add. * @return this {@code Builder} instance. * */ public Builder addMapperClasses(final String packageName) { return this.addMapperClasses(getClasses(packageName)); } /** * Adds the user defined mapper classes. * * @param packageName the specified package to search for mappers to add. * @param test a test to run against the objects found in the specified package. * @return this {@code Builder} instance. 
* */ public Builder addMapperClasses(final String packageName, final ResolverUtil.Test test) { return this.addMapperClasses(getClasses(test, packageName)); } /** * Create a new {@link MyBatisModule} instance based on this {@link Builder} * instance configuration. * * @return a new {@link MyBatisModule} instance. */ public Module create() { return new MyBatisModule(this.dataSourceProviderType, this.transactionFactoryType, this.aliases, this.handlers, this.interceptorsClasses, this.objectFactoryType, this.mapperClasses); } /** * Return a set of all classes contained in the given package. * * @param packageName the package has to be analyzed. * @return a set of all classes contained in the given package. */ private static Set<Class<?>> getClasses(String packageName) { return getClasses(new ResolverUtil.IsA(Object.class), packageName); } /** * Return a set of all classes contained in the given package that match with * the given test requirement. * * @param test the class filter on the given package. * @param packageName the package has to be analyzed. * @return a set of all classes contained in the given package. */ private static Set<Class<?>> getClasses(ResolverUtil.Test test, String packageName) { if (test == null) { throw new IllegalArgumentException("Parameter 'test' must be not null"); } if (packageName == null) { throw new IllegalArgumentException("Parameter 'packageName' must be not null"); } return new ResolverUtil<Object>().find(test, packageName).getClasses(); } } }
src/main/java/org/mybatis/guice/MyBatisModule.java
/* * Copyright 2010 The myBatis Team * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.mybatis.guice; import static org.mybatis.guice.iterables.Iterables.foreach; import java.util.Arrays; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashSet; import java.util.Map; import java.util.Set; import java.util.Map.Entry; import javax.sql.DataSource; import org.apache.ibatis.io.ResolverUtil; import org.apache.ibatis.mapping.Environment; import org.apache.ibatis.plugin.Interceptor; import org.apache.ibatis.reflection.factory.DefaultObjectFactory; import org.apache.ibatis.reflection.factory.ObjectFactory; import org.apache.ibatis.session.Configuration; import org.apache.ibatis.session.SqlSessionFactory; import org.apache.ibatis.transaction.TransactionFactory; import org.apache.ibatis.transaction.jdbc.JdbcTransactionFactory; import org.apache.ibatis.type.TypeHandler; import org.mybatis.guice.configuration.ConfigurationProvider; import org.mybatis.guice.configuration.Mappers; import org.mybatis.guice.configuration.TypeAliases; import org.mybatis.guice.datasource.builtin.UnpooledDataSourceProvider; import org.mybatis.guice.environment.EnvironmentProvider; import org.mybatis.guice.iterables.Each; import org.mybatis.guice.session.SqlSessionFactoryProvider; import com.google.inject.Module; import com.google.inject.Provider; import com.google.inject.Scopes; import com.google.inject.TypeLiteral; import com.google.inject.multibindings.MapBinder; import com.google.inject.multibindings.Multibinder; /** * Easy to use helper Module that alleviates users to write the boilerplate * google-guice bindings to create the SqlSessionFactory. * * @version $Id$ */ public final class MyBatisModule extends AbstractMyBatisModule { /** * The DataSource Provider class reference. */ private final Class<? extends Provider<DataSource>> dataSourceProviderType; /** * The TransactionFactory class reference. */ private final Class<? extends TransactionFactory> transactionFactoryType; /** * The user defined aliases. */ private final Map<String, Class<?>> aliases; /** * The user defined type handlers. */ private final Map<Class<?>, Class<? extends TypeHandler>> handlers; /** * The user defined Interceptor classes. */ private final Set<Class<? extends Interceptor>> interceptorsClasses; /** * The ObjectFactory class reference. */ private Class<? extends ObjectFactory> objectFactoryType; /** * The user defined mapper classes. */ private final Set<Class<?>> mapperClasses; /** * Creates a new module that binds all the needed modules to create the * SqlSessionFactory, injecting all the required components. * * @param dataSourceProviderType the DataSource Provider class reference. * @param transactionFactoryType the TransactionFactory class reference. * @param aliases the user defined aliases. * @param handlers the user defined type handlers. * @param interceptorsClasses the user defined Interceptor classes. * @param objectFactoryType the ObjectFactory class reference. 
* @param mapperClasses the user defined mapper classes. */ private MyBatisModule( Class<? extends Provider<DataSource>> dataSourceProviderType, Class<? extends TransactionFactory> transactionFactoryType, Map<String, Class<?>> aliases, Map<Class<?>, Class<? extends TypeHandler>> handlers, Set<Class<? extends Interceptor>> interceptorsClasses, Class<? extends ObjectFactory> objectFactoryType, Set<Class<?>> mapperClasses) { this.dataSourceProviderType = dataSourceProviderType; this.transactionFactoryType = transactionFactoryType; this.aliases = aliases; this.handlers = handlers; this.interceptorsClasses = interceptorsClasses; this.objectFactoryType = objectFactoryType; this.mapperClasses = mapperClasses; } /** * {@inheritDoc} */ @Override protected void configure() { super.configure(); // needed binding this.bind(DataSource.class).toProvider(this.dataSourceProviderType).in(Scopes.SINGLETON); this.bind(TransactionFactory.class).to(this.transactionFactoryType).in(Scopes.SINGLETON); this.bind(Environment.class).toProvider(EnvironmentProvider.class).in(Scopes.SINGLETON); this.bind(Configuration.class).toProvider(ConfigurationProvider.class).in(Scopes.SINGLETON); this.bind(ObjectFactory.class).to(this.objectFactoryType).in(Scopes.SINGLETON); this.bind(SqlSessionFactory.class).toProvider(SqlSessionFactoryProvider.class); // optional bindings // aliases if (!this.aliases.isEmpty()) { this.bind(new TypeLiteral<Map<String, Class<?>>>(){}).annotatedWith(TypeAliases.class).toInstance(this.aliases); } // type handlers foreach(this.handlers).handle(new Each<Map.Entry<Class<?>,Class<? extends TypeHandler>>>() { private MapBinder<Class<?>, TypeHandler> handlerBinder; public void doHandle(Entry<Class<?>, Class<? extends TypeHandler>> alias) { if (this.handlerBinder == null) { this.handlerBinder = MapBinder.newMapBinder(binder(), new TypeLiteral<Class<?>>(){}, new TypeLiteral<TypeHandler>(){}); } this.handlerBinder.addBinding(alias.getKey()).to(alias.getValue()).in(Scopes.SINGLETON); } }); // interceptors plugin foreach(this.interceptorsClasses).handle(new Each<Class<? extends Interceptor>>() { private Multibinder<Interceptor> interceptorsMultibinder; public void doHandle(Class<? extends Interceptor> interceptorType) { if (this.interceptorsMultibinder == null) { this.interceptorsMultibinder = Multibinder.newSetBinder(binder(), Interceptor.class); } this.interceptorsMultibinder.addBinding().to(interceptorType).in(Scopes.SINGLETON); } }); // mappers if (!this.mapperClasses.isEmpty()) { this.bind(new TypeLiteral<Set<Class<?>>>() {}).annotatedWith(Mappers.class).toInstance(this.mapperClasses); foreach(this.mapperClasses).handle(new EachMapper(this.binder())); } } /** * The {@link MyBatisModule} Builder. * * By default the Builder uses the following settings: * <ul> * <li>DataSource Provider type: {@link UnpooledDataSourceProvider};</li> * <li>TransactionFactory type: org.apache.ibatis.transaction.jdbc.JdbcTransactionFactory;</li> * <li>ObjectFactory type: org.apache.ibatis.reflection.factory.ObjectFactory.</li> * </ul> */ public static final class Builder { /** * The DataSource Provider class reference. */ private Class<? extends Provider<DataSource>> dataSourceProviderType = UnpooledDataSourceProvider.class; /** * The TransactionFactory class reference. */ private Class<? extends TransactionFactory> transactionFactoryType = JdbcTransactionFactory.class; /** * The user defined aliases. */ private final Map<String, Class<?>> aliases = new HashMap<String, Class<?>>(); /** * The user defined type handlers. 
*/ private final Map<Class<?>, Class<? extends TypeHandler>> handlers = new HashMap<Class<?>, Class<? extends TypeHandler>>(); /** * The user defined Interceptor classes. */ private final Set<Class<? extends Interceptor>> interceptorsClasses = new HashSet<Class<? extends Interceptor>>(); /** * The ObjectFactory Provider class reference. */ private Class<? extends ObjectFactory> objectFactoryType = DefaultObjectFactory.class; /** * The user defined mapper classes. */ private final Set<Class<?>> mapperClasses = new LinkedHashSet<Class<?>>(); /** * Set the DataSource Provider type has to be bound. * * @param dataSourceProviderType the DataSource Provider type. * @return this {@code Builder} instance. */ public Builder setDataSourceProviderType(Class<? extends Provider<DataSource>> dataSourceProviderType) { if (dataSourceProviderType == null) { throw new IllegalArgumentException("Parameter 'dataSourceProviderType' must be not null"); } this.dataSourceProviderType = dataSourceProviderType; return this; } /** * Set the TransactionFactory type has to be bound. * * @param transactionFactoryType the TransactionFactory type. * @return this {@code Builder} instance. */ public Builder setTransactionFactoryType(Class<? extends TransactionFactory> transactionFactoryType) { if (transactionFactoryType == null) { throw new IllegalArgumentException("Parameter 'transactionFactoryType' must be not null"); } this.transactionFactoryType = transactionFactoryType; return this; } /** * Adding simple aliases means that every specified class will be bound * using the simple class name, i.e. {@code com.acme.Foo} becomes {@code Foo}. * * @param types the specified types have to be bind. * @return this {@code Builder} instance. */ public Builder addSimpleAliases(final Class<?>...types) { if (types != null) { return this.addSimpleAliases(Arrays.asList(types)); } return this; } /** * Adding simple aliases means that every specified class will be bound * using the simple class name, i.e. {@code com.acme.Foo} becomes {@code Foo}. * * @param types the specified types have to be bind. * @return this {@code Builder} instance. */ public Builder addSimpleAliases(final Collection<Class<?>> types) { foreach(types).handle(new Each<Class<?>>() { public void doHandle(Class<?> clazz) { addAlias(clazz.getSimpleName(), clazz); } }); return this; } /** * Adds all Classes in the given package as a simple alias. * Adding simple aliases means that every specified class will be bound * using the simple class name, i.e. {@code com.acme.Foo} becomes {@code Foo}. * * @param packageName the specified package to search for classes to alias. * @return this {@code Builder} instance. */ public Builder addSimpleAliases(final String packageName) { return this.addSimpleAliases(getClasses(packageName)); } /** * Adds all Classes in the given package as a simple alias. * Adding simple aliases means that every specified class will be bound * using the simple class name, i.e. {@code com.acme.Foo} becomes {@code Foo}. * * @param packageName the specified package to search for classes to alias. * @param test a test to run against the objects found in the specified package. * @return this {@code Builder} instance. */ public Builder addSimpleAliases(final String packageName, final ResolverUtil.Test test) { return this.addSimpleAliases(getClasses(test, packageName)); } /** * Add a user defined binding. * * @param alias the string type alias * @param clazz the type has to be bound. 
*/ public Builder addAlias(final String alias, final Class<?> clazz) { this.aliases.put(alias, clazz); return this; } /** * Add a user defined Type Handler letting google-guice creating it. * * @param type the specified type has to be handled. * @param handler the handler type. * @return this {@code Builder} instance. */ public Builder addTypeHandler(final Class<?> type, final Class<? extends TypeHandler> handler) { this.handlers.put(type, handler); return this; } /** * Adds the user defined myBatis interceptors plugins types, letting * google-guice creating them. * * @param interceptorsClasses the user defined MyBatis interceptors plugins types. * @return this {@code Builder} instance. * */ public Builder addInterceptorsClasses(Class<? extends Interceptor>...interceptorsClasses) { if (interceptorsClasses != null) { return this.addInterceptorsClasses(Arrays.asList(interceptorsClasses)); } return this; } /** * Adds the user defined MyBatis interceptors plugins types, letting * google-guice creating them. * * @param interceptorsClasses the user defined MyBatis Interceptors plugins types. * @return this {@code Builder} instance. * */ public Builder addInterceptorsClasses(Collection<Class<? extends Interceptor>> interceptorsClasses) { if (interceptorsClasses != null) { this.interceptorsClasses.addAll(interceptorsClasses); } return this; } /** * Adds the user defined MyBatis interceptors plugins types in the given package, * letting google-guice creating them. * * @param packageName the package where looking for Interceptors plugins types. * @return this {@code Builder} instance. */ public Builder addInterceptorsClasses(String packageName) { if (packageName == null) { throw new IllegalArgumentException("Parameter 'packageName' must be not null"); } return this.addInterceptorsClasses(new ResolverUtil<Interceptor>() .find(new ResolverUtil.IsA(Interceptor.class), packageName) .getClasses()); } /** * Sets the ObjectFactory class. * * @param objectFactoryType the ObjectFactory type. * @return this {@code Builder} instance. */ public Builder setObjectFactoryType(Class<? extends ObjectFactory> objectFactoryType) { if (objectFactoryType == null) { throw new IllegalArgumentException("Parameter 'objectFactoryType' must be not null"); } this.objectFactoryType = objectFactoryType; return this; } /** * Adds the user defined mapper classes. * * @param mapperClasses the user defined mapper classes. * @return this {@code Builder} instance. * */ public Builder addMapperClasses(Class<?>...mapperClasses) { if (mapperClasses != null) { return this.addMapperClasses(Arrays.asList(mapperClasses)); } return this; } /** * Adds the user defined mapper classes. * * @param mapperClasses the user defined mapper classes. * @return this {@code Builder} instance. * */ public Builder addMapperClasses(Collection<Class<?>> mapperClasses) { if (mapperClasses != null) { this.mapperClasses.addAll(mapperClasses); } return this; } /** * Adds the user defined mapper classes. * * @param packageName the specified package to search for mappers to add. * @return this {@code Builder} instance. * */ public Builder addMapperClasses(final String packageName) { return this.addMapperClasses(getClasses(packageName)); } /** * Adds the user defined mapper classes. * * @param packageName the specified package to search for mappers to add. * @param test a test to run against the objects found in the specified package. * @return this {@code Builder} instance. 
* */ public Builder addMapperClasses(final String packageName, final ResolverUtil.Test test) { return this.addMapperClasses(getClasses(test, packageName)); } /** * Create a new {@link MyBatisModule} instance based on this {@link Builder} * instance configuration. * * @return a new {@link MyBatisModule} instance. */ public Module create() { return new MyBatisModule(this.dataSourceProviderType, this.transactionFactoryType, this.aliases, this.handlers, this.interceptorsClasses, this.objectFactoryType, this.mapperClasses); } /** * Return a set of all classes contained in the given package. * * @param packageName the package has to be analyzed. * @return a set of all classes contained in the given package. */ private static Set<Class<?>> getClasses(String packageName) { return getClasses(new ResolverUtil.IsA(Object.class), packageName); } /** * Return a set of all classes contained in the given package that match with * the given test requirement. * * @param test the class filter on the given package. * @param packageName the package has to be analyzed. * @return a set of all classes contained in the given package. */ private static Set<Class<?>> getClasses(ResolverUtil.Test test, String packageName) { if (test == null) { throw new IllegalArgumentException("Parameter 'test' must be not null"); } if (packageName == null) { throw new IllegalArgumentException("Parameter 'packageName' must be not null"); } return new ResolverUtil<Object>().find(test, packageName).getClasses(); } } }
added missing SqlSessionFactory singleton scope in binding
src/main/java/org/mybatis/guice/MyBatisModule.java
added missing SqlSessionFactory singleton scope in binding
<ide><path>src/main/java/org/mybatis/guice/MyBatisModule.java <ide> this.bind(Environment.class).toProvider(EnvironmentProvider.class).in(Scopes.SINGLETON); <ide> this.bind(Configuration.class).toProvider(ConfigurationProvider.class).in(Scopes.SINGLETON); <ide> this.bind(ObjectFactory.class).to(this.objectFactoryType).in(Scopes.SINGLETON); <del> this.bind(SqlSessionFactory.class).toProvider(SqlSessionFactoryProvider.class); <add> this.bind(SqlSessionFactory.class).toProvider(SqlSessionFactoryProvider.class).in(Scopes.SINGLETON); <ide> <ide> // optional bindings <ide>
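A stand-alone sketch in plain Guice, not the MyBatis wiring itself, illustrating what the added scope changes: a provider-backed binding without .in(Scopes.SINGLETON) invokes its provider on every injection, while the singleton-scoped form reuses a single instance, which is the behaviour this commit restores for SqlSessionFactory. The Session and SessionProvider types below are stand-ins invented for the example.

import com.google.inject.AbstractModule;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.google.inject.Provider;
import com.google.inject.Scopes;

public class ProviderScopeSketch {

    static class Session {}

    static class SessionProvider implements Provider<Session> {
        @Override
        public Session get() {
            return new Session(); // stands in for SqlSessionFactoryProvider building the factory
        }
    }

    public static void main(String[] args) {
        Injector unscoped = Guice.createInjector(new AbstractModule() {
            @Override
            protected void configure() {
                bind(Session.class).toProvider(SessionProvider.class); // binding style before the fix
            }
        });
        Injector scoped = Guice.createInjector(new AbstractModule() {
            @Override
            protected void configure() {
                bind(Session.class).toProvider(SessionProvider.class).in(Scopes.SINGLETON); // fixed binding
            }
        });

        System.out.println(unscoped.getInstance(Session.class) == unscoped.getInstance(Session.class)); // false
        System.out.println(scoped.getInstance(Session.class) == scoped.getInstance(Session.class));     // true
    }
}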
Java
apache-2.0
1074426a3acc154ed3c20e1f9c385bd696a83d15
0
allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.intellij.codeInsight.daemon.impl; import com.intellij.codeInsight.daemon.GutterMark; import com.intellij.lang.annotation.HighlightSeverity; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.editor.Document; import com.intellij.openapi.editor.RangeMarker; import com.intellij.openapi.editor.colors.EditorColorsScheme; import com.intellij.openapi.editor.event.DocumentEvent; import com.intellij.openapi.editor.ex.DocumentEx; import com.intellij.openapi.editor.ex.MarkupModelEx; import com.intellij.openapi.editor.ex.RangeHighlighterEx; import com.intellij.openapi.editor.impl.DocumentMarkupModel; import com.intellij.openapi.editor.impl.RedBlackTree; import com.intellij.openapi.editor.impl.SweepProcessor; import com.intellij.openapi.editor.markup.*; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.*; import com.intellij.psi.PsiDocumentManager; import com.intellij.psi.PsiFile; import com.intellij.util.Consumer; import com.intellij.util.Processor; import com.intellij.util.containers.ContainerUtil; import gnu.trove.THashMap; import gnu.trove.THashSet; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.awt.*; import java.util.*; import java.util.List; public class UpdateHighlightersUtil { private static final Comparator<HighlightInfo> BY_START_OFFSET_NODUPS = (o1, o2) -> { int d = o1.getActualStartOffset() - o2.getActualStartOffset(); if (d != 0) return d; d = o1.getActualEndOffset() - o2.getActualEndOffset(); if (d != 0) return d; d = Comparing.compare(o1.getSeverity(), o2.getSeverity()); if (d != 0) return -d; // higher severity first, to prevent warnings overlap errors if (!Comparing.equal(o1.type, o2.type)) { return String.valueOf(o1.type).compareTo(String.valueOf(o2.type)); } if (!Comparing.equal(o1.getGutterIconRenderer(), o2.getGutterIconRenderer())) { return String.valueOf(o1.getGutterIconRenderer()).compareTo(String.valueOf(o2.getGutterIconRenderer())); } if (!Comparing.equal(o1.forcedTextAttributes, o2.forcedTextAttributes)) { return String.valueOf(o1.getGutterIconRenderer()).compareTo(String.valueOf(o2.getGutterIconRenderer())); } if (!Comparing.equal(o1.forcedTextAttributesKey, o2.forcedTextAttributesKey)) { return String.valueOf(o1.getGutterIconRenderer()).compareTo(String.valueOf(o2.getGutterIconRenderer())); } return Comparing.compare(o1.getDescription(), o2.getDescription()); }; private static boolean isCoveredByOffsets(HighlightInfo info, HighlightInfo coveredBy) { return coveredBy.startOffset <= info.startOffset && info.endOffset <= coveredBy.endOffset && info.getGutterIconRenderer() == null; } static void addHighlighterToEditorIncrementally(@NotNull Project project, @NotNull Document document, @NotNull PsiFile file, int startOffset, int endOffset, @NotNull final HighlightInfo info, @Nullable final EditorColorsScheme colorsScheme, // if null global scheme will be used final int group, @NotNull Map<TextRange, RangeMarker> ranges2markersCache) { ApplicationManager.getApplication().assertIsDispatchThread(); if (isFileLevelOrGutterAnnotation(info)) return; if (info.getStartOffset() < startOffset || info.getEndOffset() > endOffset) return; MarkupModel markup = DocumentMarkupModel.forDocument(document, project, true); final SeverityRegistrar severityRegistrar = SeverityRegistrar.getSeverityRegistrar(project); final boolean 
myInfoIsError = isSevere(info, severityRegistrar); Processor<HighlightInfo> otherHighlightInTheWayProcessor = oldInfo -> { if (!myInfoIsError && isCovered(info, severityRegistrar, oldInfo)) { return false; } return oldInfo.getGroup() != group || !oldInfo.equalsByActualOffset(info); }; boolean allIsClear = DaemonCodeAnalyzerEx.processHighlights(document, project, null, info.getActualStartOffset(), info.getActualEndOffset(), otherHighlightInTheWayProcessor); if (allIsClear) { createOrReuseHighlighterFor(info, colorsScheme, document, group, file, (MarkupModelEx)markup, null, ranges2markersCache, severityRegistrar); clearWhiteSpaceOptimizationFlag(document); assertMarkupConsistent(markup, project); } } public static boolean isFileLevelOrGutterAnnotation(HighlightInfo info) { return info.isFileLevelAnnotation() || info.getGutterIconRenderer() != null; } public static void setHighlightersToEditor(@NotNull Project project, @NotNull Document document, int startOffset, int endOffset, @NotNull Collection<HighlightInfo> highlights, @Nullable final EditorColorsScheme colorsScheme, // if null global scheme will be used int group) { TextRange range = new TextRange(startOffset, endOffset); ApplicationManager.getApplication().assertIsDispatchThread(); PsiFile psiFile = PsiDocumentManager.getInstance(project).getPsiFile(document); final DaemonCodeAnalyzerEx codeAnalyzer = DaemonCodeAnalyzerEx.getInstanceEx(project); codeAnalyzer.cleanFileLevelHighlights(project, group, psiFile); MarkupModel markup = DocumentMarkupModel.forDocument(document, project, true); assertMarkupConsistent(markup, project); setHighlightersInRange(project, document, range, colorsScheme, new ArrayList<>(highlights), (MarkupModelEx)markup, group); } // set highlights inside startOffset,endOffset but outside priorityRange static void setHighlightersOutsideRange(@NotNull final Project project, @NotNull final Document document, @NotNull final PsiFile psiFile, @NotNull final List<? 
extends HighlightInfo> infos, @Nullable final EditorColorsScheme colorsScheme, // if null global scheme will be used final int startOffset, final int endOffset, @NotNull final ProperTextRange priorityRange, final int group) { ApplicationManager.getApplication().assertIsDispatchThread(); final DaemonCodeAnalyzerEx codeAnalyzer = DaemonCodeAnalyzerEx.getInstanceEx(project); if (startOffset == 0 && endOffset == document.getTextLength()) { codeAnalyzer.cleanFileLevelHighlights(project, group, psiFile); } final MarkupModel markup = DocumentMarkupModel.forDocument(document, project, true); assertMarkupConsistent(markup, project); final SeverityRegistrar severityRegistrar = SeverityRegistrar.getSeverityRegistrar(project); final HighlightersRecycler infosToRemove = new HighlightersRecycler(); ContainerUtil.quickSort(infos, BY_START_OFFSET_NODUPS); Set<HighlightInfo> infoSet = new THashSet<>(infos); Processor<HighlightInfo> processor = info -> { if (info.getGroup() == group) { RangeHighlighter highlighter = info.getHighlighter(); int hiStart = highlighter.getStartOffset(); int hiEnd = highlighter.getEndOffset(); if (!info.isFromInjection() && hiEnd < document.getTextLength() && (hiEnd != 0 && hiEnd <= startOffset || hiStart >= endOffset)) { return true; // injections are oblivious to restricting range } boolean toRemove = infoSet.contains(info) || !priorityRange.containsRange(hiStart, hiEnd) && (hiEnd != document.getTextLength() || priorityRange.getEndOffset() != document.getTextLength()); if (toRemove) { infosToRemove.recycleHighlighter(highlighter); info.setHighlighter(null); } } return true; }; DaemonCodeAnalyzerEx.processHighlightsOverlappingOutside(document, project, null, priorityRange.getStartOffset(), priorityRange.getEndOffset(), processor); final Map<TextRange, RangeMarker> ranges2markersCache = new THashMap<>(10); final boolean[] changed = {false}; SweepProcessor.Generator<HighlightInfo> generator = proc -> ContainerUtil.process(infos, proc); SweepProcessor.sweep(generator, (offset, info, atStart, overlappingIntervals) -> { if (!atStart) return true; if (!info.isFromInjection() && info.getEndOffset() < document.getTextLength() && (info.getEndOffset() <= startOffset || info.getStartOffset()>=endOffset)) return true; // injections are oblivious to restricting range if (info.isFileLevelAnnotation()) { codeAnalyzer.addFileLevelHighlight(project, group, info, psiFile); changed[0] = true; return true; } if (isWarningCoveredByError(info, overlappingIntervals, severityRegistrar)) { return true; } if (info.getStartOffset() < priorityRange.getStartOffset() || info.getEndOffset() > priorityRange.getEndOffset()) { createOrReuseHighlighterFor(info, colorsScheme, document, group, psiFile, (MarkupModelEx)markup, infosToRemove, ranges2markersCache, severityRegistrar); changed[0] = true; } return true; }); for (RangeHighlighter highlighter : infosToRemove.forAllInGarbageBin()) { highlighter.dispose(); changed[0] = true; } if (changed[0]) { clearWhiteSpaceOptimizationFlag(document); } assertMarkupConsistent(markup, project); } static void setHighlightersInRange(@NotNull final Project project, @NotNull final Document document, @NotNull final TextRange range, @Nullable final EditorColorsScheme colorsScheme, // if null global scheme will be used @NotNull final List<? 
extends HighlightInfo> infos, @NotNull final MarkupModelEx markup, final int group) { ApplicationManager.getApplication().assertIsDispatchThread(); final SeverityRegistrar severityRegistrar = SeverityRegistrar.getSeverityRegistrar(project); final HighlightersRecycler infosToRemove = new HighlightersRecycler(); DaemonCodeAnalyzerEx.processHighlights(document, project, null, range.getStartOffset(), range.getEndOffset(), info -> { if (info.getGroup() == group) { RangeHighlighter highlighter = info.getHighlighter(); int hiStart = highlighter.getStartOffset(); int hiEnd = highlighter.getEndOffset(); boolean willBeRemoved = hiEnd == document.getTextLength() && range.getEndOffset() == document.getTextLength() /*|| range.intersectsStrict(hiStart, hiEnd)*/ || range.containsRange(hiStart, hiEnd) /*|| hiStart <= range.getStartOffset() && hiEnd >= range.getEndOffset()*/; if (willBeRemoved) { infosToRemove.recycleHighlighter(highlighter); info.setHighlighter(null); } } return true; }); ContainerUtil.quickSort(infos, BY_START_OFFSET_NODUPS); final Map<TextRange, RangeMarker> ranges2markersCache = new THashMap<>(10); final PsiFile psiFile = PsiDocumentManager.getInstance(project).getPsiFile(document); final DaemonCodeAnalyzerEx codeAnalyzer = DaemonCodeAnalyzerEx.getInstanceEx(project); final boolean[] changed = {false}; SweepProcessor.Generator<HighlightInfo> generator = (Processor<HighlightInfo> processor) -> ContainerUtil.process(infos, processor); SweepProcessor.sweep(generator, (offset, info, atStart, overlappingIntervals) -> { if (!atStart) { return true; } if (info.isFileLevelAnnotation() && psiFile != null && psiFile.getViewProvider().isPhysical()) { codeAnalyzer.addFileLevelHighlight(project, group, info, psiFile); changed[0] = true; return true; } if (isWarningCoveredByError(info, overlappingIntervals, severityRegistrar)) { return true; } if (info.getStartOffset() >= range.getStartOffset() && info.getEndOffset() <= range.getEndOffset() && psiFile != null) { createOrReuseHighlighterFor(info, colorsScheme, document, group, psiFile, markup, infosToRemove, ranges2markersCache, severityRegistrar); changed[0] = true; } return true; }); for (RangeHighlighter highlighter : infosToRemove.forAllInGarbageBin()) { highlighter.dispose(); changed[0] = true; } if (changed[0]) { clearWhiteSpaceOptimizationFlag(document); } assertMarkupConsistent(markup, project); } private static boolean isWarningCoveredByError(@NotNull HighlightInfo info, @NotNull Collection<? 
extends HighlightInfo> overlappingIntervals, @NotNull SeverityRegistrar severityRegistrar) { if (!isSevere(info, severityRegistrar)) { for (HighlightInfo overlapping : overlappingIntervals) { if (isCovered(info, severityRegistrar, overlapping)) return true; } } return false; } private static boolean isCovered(@NotNull HighlightInfo warning, @NotNull SeverityRegistrar severityRegistrar, @NotNull HighlightInfo candidate) { if (!isCoveredByOffsets(warning, candidate)) return false; HighlightSeverity severity = candidate.getSeverity(); if (severity == HighlightInfoType.SYMBOL_TYPE_SEVERITY) return false; // syntax should not interfere with warnings return isSevere(candidate, severityRegistrar); } private static boolean isSevere(@NotNull HighlightInfo info, @NotNull SeverityRegistrar severityRegistrar) { HighlightSeverity severity = info.getSeverity(); return severityRegistrar.compare(HighlightSeverity.ERROR, severity) <= 0 || severity == HighlightInfoType.SYMBOL_TYPE_SEVERITY; } private static void createOrReuseHighlighterFor(@NotNull final HighlightInfo info, @Nullable final EditorColorsScheme colorsScheme, // if null global scheme will be used @NotNull final Document document, final int group, @NotNull final PsiFile psiFile, @NotNull MarkupModelEx markup, @Nullable HighlightersRecycler infosToRemove, @NotNull final Map<TextRange, RangeMarker> ranges2markersCache, @NotNull SeverityRegistrar severityRegistrar) { int infoStartOffset = info.startOffset; int infoEndOffset = info.endOffset; final int docLength = document.getTextLength(); if (infoEndOffset > docLength) { infoEndOffset = docLength; infoStartOffset = Math.min(infoStartOffset, infoEndOffset); } if (infoEndOffset == infoStartOffset && !info.isAfterEndOfLine()) { if (infoEndOffset == docLength) return; // empty highlighter beyond file boundaries infoEndOffset++; //show something in case of empty highlightinfo } info.setGroup(group); int layer = getLayer(info, severityRegistrar); RangeHighlighterEx highlighter = infosToRemove == null ? 
null : (RangeHighlighterEx)infosToRemove.pickupHighlighterFromGarbageBin(infoStartOffset, infoEndOffset, layer); final TextRange finalInfoRange = new TextRange(infoStartOffset, infoEndOffset); final TextAttributes infoAttributes = info.getTextAttributes(psiFile, colorsScheme); Consumer<RangeHighlighterEx> changeAttributes = finalHighlighter -> { if (infoAttributes != null) { finalHighlighter.setTextAttributes(infoAttributes); } info.setHighlighter(finalHighlighter); finalHighlighter.setAfterEndOfLine(info.isAfterEndOfLine()); Color color = info.getErrorStripeMarkColor(psiFile, colorsScheme); finalHighlighter.setErrorStripeMarkColor(color); if (info != finalHighlighter.getErrorStripeTooltip()) { finalHighlighter.setErrorStripeTooltip(info); } GutterMark renderer = info.getGutterIconRenderer(); finalHighlighter.setGutterIconRenderer((GutterIconRenderer)renderer); ranges2markersCache.put(finalInfoRange, info.getHighlighter()); if (info.quickFixActionRanges != null) { List<Pair<HighlightInfo.IntentionActionDescriptor, RangeMarker>> list = new ArrayList<>(info.quickFixActionRanges.size()); for (Pair<HighlightInfo.IntentionActionDescriptor, TextRange> pair : info.quickFixActionRanges) { TextRange textRange = pair.second; RangeMarker marker = getOrCreate(document, ranges2markersCache, textRange); list.add(Pair.create(pair.first, marker)); } info.quickFixActionMarkers = ContainerUtil.createLockFreeCopyOnWriteList(list); } ProperTextRange fixRange = info.getFixTextRange(); if (finalInfoRange.equals(fixRange)) { info.fixMarker = null; // null means it the same as highlighter' } else { info.fixMarker = getOrCreate(document, ranges2markersCache, fixRange); } }; if (highlighter == null) { highlighter = markup.addRangeHighlighterAndChangeAttributes(infoStartOffset, infoEndOffset, layer, null, HighlighterTargetArea.EXACT_RANGE, false, changeAttributes); if (HighlightInfoType.VISIBLE_IF_FOLDED.contains(info.type)) { highlighter.setVisibleIfFolded(true); } } else { markup.changeAttributesInBatch(highlighter, changeAttributes); } if (infoAttributes != null) { boolean attributesSet = Comparing.equal(infoAttributes, highlighter.getTextAttributes()); assert attributesSet : "Info: " + infoAttributes + "; colorsScheme: " + (colorsScheme == null ? 
"[global]" : colorsScheme.getName()) + "; highlighter:" + highlighter.getTextAttributes(); } } private static int getLayer(@NotNull HighlightInfo info, @NotNull SeverityRegistrar severityRegistrar) { final HighlightSeverity severity = info.getSeverity(); int layer; if (severity == HighlightSeverity.WARNING) { layer = HighlighterLayer.WARNING; } else if (severity == HighlightSeverity.WEAK_WARNING) { layer = HighlighterLayer.WEAK_WARNING; } else if (severityRegistrar.compare(severity, HighlightSeverity.ERROR) >= 0) { layer = HighlighterLayer.ERROR; } else if (severity == HighlightInfoType.INJECTED_FRAGMENT_SEVERITY) { layer = HighlighterLayer.CARET_ROW-1; } else if (severity == HighlightInfoType.ELEMENT_UNDER_CARET_SEVERITY) { layer = HighlighterLayer.ELEMENT_UNDER_CARET; } else { layer = HighlighterLayer.ADDITIONAL_SYNTAX; } return layer; } @NotNull private static RangeMarker getOrCreate(@NotNull Document document, @NotNull Map<TextRange, RangeMarker> ranges2markersCache, @NotNull TextRange textRange) { return ranges2markersCache.computeIfAbsent(textRange, __ -> document.createRangeMarker(textRange)); } private static final Key<Boolean> TYPING_INSIDE_HIGHLIGHTER_OCCURRED = Key.create("TYPING_INSIDE_HIGHLIGHTER_OCCURRED"); static boolean isWhitespaceOptimizationAllowed(@NotNull Document document) { return document.getUserData(TYPING_INSIDE_HIGHLIGHTER_OCCURRED) == null; } private static void disableWhiteSpaceOptimization(@NotNull Document document) { document.putUserData(TYPING_INSIDE_HIGHLIGHTER_OCCURRED, Boolean.TRUE); } private static void clearWhiteSpaceOptimizationFlag(@NotNull Document document) { document.putUserData(TYPING_INSIDE_HIGHLIGHTER_OCCURRED, null); } static void updateHighlightersByTyping(@NotNull Project project, @NotNull DocumentEvent e) { ApplicationManager.getApplication().assertIsDispatchThread(); final Document document = e.getDocument(); if (document instanceof DocumentEx && ((DocumentEx)document).isInBulkUpdate()) return; final MarkupModel markup = DocumentMarkupModel.forDocument(document, project, true); assertMarkupConsistent(markup, project); final int start = e.getOffset() - 1; final int end = start + e.getOldLength(); final List<HighlightInfo> toRemove = new ArrayList<>(); DaemonCodeAnalyzerEx.processHighlights(document, project, null, start, end, info -> { if (!info.needUpdateOnTyping()) return true; RangeHighlighter highlighter = info.getHighlighter(); int highlighterStart = highlighter.getStartOffset(); int highlighterEnd = highlighter.getEndOffset(); if (info.isAfterEndOfLine()) { if (highlighterStart < document.getTextLength()) { highlighterStart += 1; } if (highlighterEnd < document.getTextLength()) { highlighterEnd += 1; } } if (!highlighter.isValid() || start < highlighterEnd && highlighterStart <= end) { toRemove.add(info); } return true; }); for (HighlightInfo info : toRemove) { if (!info.getHighlighter().isValid() || info.type.equals(HighlightInfoType.WRONG_REF)) { info.getHighlighter().dispose(); } } assertMarkupConsistent(markup, project); if (!toRemove.isEmpty()) { disableWhiteSpaceOptimization(document); } } private static void assertMarkupConsistent(@NotNull final MarkupModel markup, @NotNull Project project) { if (!RedBlackTree.VERIFY) { return; } Document document = markup.getDocument(); DaemonCodeAnalyzerEx.processHighlights(document, project, null, 0, document.getTextLength(), info -> { assert ((MarkupModelEx)markup).containsHighlighter(info.getHighlighter()); return true; }); RangeHighlighter[] allHighlighters = markup.getAllHighlighters(); 
for (RangeHighlighter highlighter : allHighlighters) { if (!highlighter.isValid()) continue; HighlightInfo info = HighlightInfo.fromRangeHighlighter(highlighter); if (info == null) continue; boolean contains = !DaemonCodeAnalyzerEx .processHighlights(document, project, null, info.getActualStartOffset(), info.getActualEndOffset(), highlightInfo -> BY_START_OFFSET_NODUPS.compare(highlightInfo, info) != 0); assert contains: info; } } }
platform/analysis-impl/src/com/intellij/codeInsight/daemon/impl/UpdateHighlightersUtil.java
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.intellij.codeInsight.daemon.impl; import com.intellij.codeInsight.daemon.GutterMark; import com.intellij.lang.annotation.HighlightSeverity; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.editor.Document; import com.intellij.openapi.editor.RangeMarker; import com.intellij.openapi.editor.colors.EditorColorsScheme; import com.intellij.openapi.editor.event.DocumentEvent; import com.intellij.openapi.editor.ex.DocumentEx; import com.intellij.openapi.editor.ex.MarkupModelEx; import com.intellij.openapi.editor.ex.RangeHighlighterEx; import com.intellij.openapi.editor.impl.DocumentMarkupModel; import com.intellij.openapi.editor.impl.RedBlackTree; import com.intellij.openapi.editor.impl.SweepProcessor; import com.intellij.openapi.editor.markup.*; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.*; import com.intellij.psi.PsiDocumentManager; import com.intellij.psi.PsiFile; import com.intellij.util.Consumer; import com.intellij.util.Processor; import com.intellij.util.containers.ContainerUtil; import gnu.trove.THashMap; import gnu.trove.THashSet; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.awt.*; import java.util.*; import java.util.List; public class UpdateHighlightersUtil { private static final Comparator<HighlightInfo> BY_START_OFFSET_NODUPS = (o1, o2) -> { int d = o1.getActualStartOffset() - o2.getActualStartOffset(); if (d != 0) return d; d = o1.getActualEndOffset() - o2.getActualEndOffset(); if (d != 0) return d; d = Comparing.compare(o1.getSeverity(), o2.getSeverity()); if (d != 0) return -d; // higher severity first, to prevent warnings overlap errors if (!Comparing.equal(o1.type, o2.type)) { return String.valueOf(o1.type).compareTo(String.valueOf(o2.type)); } if (!Comparing.equal(o1.getGutterIconRenderer(), o2.getGutterIconRenderer())) { return String.valueOf(o1.getGutterIconRenderer()).compareTo(String.valueOf(o2.getGutterIconRenderer())); } if (!Comparing.equal(o1.forcedTextAttributes, o2.forcedTextAttributes)) { return String.valueOf(o1.getGutterIconRenderer()).compareTo(String.valueOf(o2.getGutterIconRenderer())); } if (!Comparing.equal(o1.forcedTextAttributesKey, o2.forcedTextAttributesKey)) { return String.valueOf(o1.getGutterIconRenderer()).compareTo(String.valueOf(o2.getGutterIconRenderer())); } return Comparing.compare(o1.getDescription(), o2.getDescription()); }; private static boolean isCoveredByOffsets(HighlightInfo info, HighlightInfo coveredBy) { return coveredBy.startOffset <= info.startOffset && info.endOffset <= coveredBy.endOffset && info.getGutterIconRenderer() == null; } static void addHighlighterToEditorIncrementally(@NotNull Project project, @NotNull Document document, @NotNull PsiFile file, int startOffset, int endOffset, @NotNull final HighlightInfo info, @Nullable final EditorColorsScheme colorsScheme, // if null global scheme will be used final int group, @NotNull Map<TextRange, RangeMarker> ranges2markersCache) { ApplicationManager.getApplication().assertIsDispatchThread(); if (isFileLevelOrGutterAnnotation(info)) return; if (info.getStartOffset() < startOffset || info.getEndOffset() > endOffset) return; MarkupModel markup = DocumentMarkupModel.forDocument(document, project, true); final SeverityRegistrar severityRegistrar = SeverityRegistrar.getSeverityRegistrar(project); final boolean 
myInfoIsError = isSevere(info, severityRegistrar); Processor<HighlightInfo> otherHighlightInTheWayProcessor = oldInfo -> { if (!myInfoIsError && isCovered(info, severityRegistrar, oldInfo)) { return false; } return oldInfo.getGroup() != group || !oldInfo.equalsByActualOffset(info); }; boolean allIsClear = DaemonCodeAnalyzerEx.processHighlights(document, project, null, info.getActualStartOffset(), info.getActualEndOffset(), otherHighlightInTheWayProcessor); if (allIsClear) { createOrReuseHighlighterFor(info, colorsScheme, document, group, file, (MarkupModelEx)markup, null, ranges2markersCache, severityRegistrar); clearWhiteSpaceOptimizationFlag(document); assertMarkupConsistent(markup, project); } } public static boolean isFileLevelOrGutterAnnotation(HighlightInfo info) { return info.isFileLevelAnnotation() || info.getGutterIconRenderer() != null; } public static void setHighlightersToEditor(@NotNull Project project, @NotNull Document document, int startOffset, int endOffset, @NotNull Collection<HighlightInfo> highlights, @Nullable final EditorColorsScheme colorsScheme, // if null global scheme will be used int group) { TextRange range = new TextRange(startOffset, endOffset); ApplicationManager.getApplication().assertIsDispatchThread(); PsiFile psiFile = PsiDocumentManager.getInstance(project).getPsiFile(document); final DaemonCodeAnalyzerEx codeAnalyzer = DaemonCodeAnalyzerEx.getInstanceEx(project); codeAnalyzer.cleanFileLevelHighlights(project, group, psiFile); MarkupModel markup = DocumentMarkupModel.forDocument(document, project, true); assertMarkupConsistent(markup, project); setHighlightersInRange(project, document, range, colorsScheme, new ArrayList<>(highlights), (MarkupModelEx)markup, group); } // set highlights inside startOffset,endOffset but outside priorityRange static void setHighlightersOutsideRange(@NotNull final Project project, @NotNull final Document document, @NotNull final PsiFile psiFile, @NotNull final List<? 
extends HighlightInfo> infos, @Nullable final EditorColorsScheme colorsScheme, // if null global scheme will be used final int startOffset, final int endOffset, @NotNull final ProperTextRange priorityRange, final int group) { ApplicationManager.getApplication().assertIsDispatchThread(); final DaemonCodeAnalyzerEx codeAnalyzer = DaemonCodeAnalyzerEx.getInstanceEx(project); if (startOffset == 0 && endOffset == document.getTextLength()) { codeAnalyzer.cleanFileLevelHighlights(project, group, psiFile); } final MarkupModel markup = DocumentMarkupModel.forDocument(document, project, true); assertMarkupConsistent(markup, project); final SeverityRegistrar severityRegistrar = SeverityRegistrar.getSeverityRegistrar(project); final HighlightersRecycler infosToRemove = new HighlightersRecycler(); ContainerUtil.quickSort(infos, BY_START_OFFSET_NODUPS); Set<HighlightInfo> infoSet = new THashSet<>(infos); Processor<HighlightInfo> processor = info -> { if (info.getGroup() == group) { RangeHighlighter highlighter = info.getHighlighter(); int hiStart = highlighter.getStartOffset(); int hiEnd = highlighter.getEndOffset(); if (!info.isFromInjection() && hiEnd < document.getTextLength() && (hiEnd <= startOffset || hiStart >= endOffset)) { return true; // injections are oblivious to restricting range } boolean toRemove = infoSet.contains(info) || !priorityRange.containsRange(hiStart, hiEnd) && (hiEnd != document.getTextLength() || priorityRange.getEndOffset() != document.getTextLength()); if (toRemove) { infosToRemove.recycleHighlighter(highlighter); info.setHighlighter(null); } } return true; }; DaemonCodeAnalyzerEx.processHighlightsOverlappingOutside(document, project, null, priorityRange.getStartOffset(), priorityRange.getEndOffset(), processor); final Map<TextRange, RangeMarker> ranges2markersCache = new THashMap<>(10); final boolean[] changed = {false}; SweepProcessor.Generator<HighlightInfo> generator = proc -> ContainerUtil.process(infos, proc); SweepProcessor.sweep(generator, (offset, info, atStart, overlappingIntervals) -> { if (!atStart) return true; if (!info.isFromInjection() && info.getEndOffset() < document.getTextLength() && (info.getEndOffset() <= startOffset || info.getStartOffset()>=endOffset)) return true; // injections are oblivious to restricting range if (info.isFileLevelAnnotation()) { codeAnalyzer.addFileLevelHighlight(project, group, info, psiFile); changed[0] = true; return true; } if (isWarningCoveredByError(info, overlappingIntervals, severityRegistrar)) { return true; } if (info.getStartOffset() < priorityRange.getStartOffset() || info.getEndOffset() > priorityRange.getEndOffset()) { createOrReuseHighlighterFor(info, colorsScheme, document, group, psiFile, (MarkupModelEx)markup, infosToRemove, ranges2markersCache, severityRegistrar); changed[0] = true; } return true; }); for (RangeHighlighter highlighter : infosToRemove.forAllInGarbageBin()) { highlighter.dispose(); changed[0] = true; } if (changed[0]) { clearWhiteSpaceOptimizationFlag(document); } assertMarkupConsistent(markup, project); } static void setHighlightersInRange(@NotNull final Project project, @NotNull final Document document, @NotNull final TextRange range, @Nullable final EditorColorsScheme colorsScheme, // if null global scheme will be used @NotNull final List<? 
extends HighlightInfo> infos, @NotNull final MarkupModelEx markup, final int group) { ApplicationManager.getApplication().assertIsDispatchThread(); final SeverityRegistrar severityRegistrar = SeverityRegistrar.getSeverityRegistrar(project); final HighlightersRecycler infosToRemove = new HighlightersRecycler(); DaemonCodeAnalyzerEx.processHighlights(document, project, null, range.getStartOffset(), range.getEndOffset(), info -> { if (info.getGroup() == group) { RangeHighlighter highlighter = info.getHighlighter(); int hiStart = highlighter.getStartOffset(); int hiEnd = highlighter.getEndOffset(); boolean willBeRemoved = hiEnd == document.getTextLength() && range.getEndOffset() == document.getTextLength() /*|| range.intersectsStrict(hiStart, hiEnd)*/ || range.containsRange(hiStart, hiEnd) /*|| hiStart <= range.getStartOffset() && hiEnd >= range.getEndOffset()*/; if (willBeRemoved) { infosToRemove.recycleHighlighter(highlighter); info.setHighlighter(null); } } return true; }); ContainerUtil.quickSort(infos, BY_START_OFFSET_NODUPS); final Map<TextRange, RangeMarker> ranges2markersCache = new THashMap<>(10); final PsiFile psiFile = PsiDocumentManager.getInstance(project).getPsiFile(document); final DaemonCodeAnalyzerEx codeAnalyzer = DaemonCodeAnalyzerEx.getInstanceEx(project); final boolean[] changed = {false}; SweepProcessor.Generator<HighlightInfo> generator = (Processor<HighlightInfo> processor) -> ContainerUtil.process(infos, processor); SweepProcessor.sweep(generator, (offset, info, atStart, overlappingIntervals) -> { if (!atStart) { return true; } if (info.isFileLevelAnnotation() && psiFile != null && psiFile.getViewProvider().isPhysical()) { codeAnalyzer.addFileLevelHighlight(project, group, info, psiFile); changed[0] = true; return true; } if (isWarningCoveredByError(info, overlappingIntervals, severityRegistrar)) { return true; } if (info.getStartOffset() >= range.getStartOffset() && info.getEndOffset() <= range.getEndOffset() && psiFile != null) { createOrReuseHighlighterFor(info, colorsScheme, document, group, psiFile, markup, infosToRemove, ranges2markersCache, severityRegistrar); changed[0] = true; } return true; }); for (RangeHighlighter highlighter : infosToRemove.forAllInGarbageBin()) { highlighter.dispose(); changed[0] = true; } if (changed[0]) { clearWhiteSpaceOptimizationFlag(document); } assertMarkupConsistent(markup, project); } private static boolean isWarningCoveredByError(@NotNull HighlightInfo info, @NotNull Collection<? 
extends HighlightInfo> overlappingIntervals, @NotNull SeverityRegistrar severityRegistrar) { if (!isSevere(info, severityRegistrar)) { for (HighlightInfo overlapping : overlappingIntervals) { if (isCovered(info, severityRegistrar, overlapping)) return true; } } return false; } private static boolean isCovered(@NotNull HighlightInfo warning, @NotNull SeverityRegistrar severityRegistrar, @NotNull HighlightInfo candidate) { if (!isCoveredByOffsets(warning, candidate)) return false; HighlightSeverity severity = candidate.getSeverity(); if (severity == HighlightInfoType.SYMBOL_TYPE_SEVERITY) return false; // syntax should not interfere with warnings return isSevere(candidate, severityRegistrar); } private static boolean isSevere(@NotNull HighlightInfo info, @NotNull SeverityRegistrar severityRegistrar) { HighlightSeverity severity = info.getSeverity(); return severityRegistrar.compare(HighlightSeverity.ERROR, severity) <= 0 || severity == HighlightInfoType.SYMBOL_TYPE_SEVERITY; } private static void createOrReuseHighlighterFor(@NotNull final HighlightInfo info, @Nullable final EditorColorsScheme colorsScheme, // if null global scheme will be used @NotNull final Document document, final int group, @NotNull final PsiFile psiFile, @NotNull MarkupModelEx markup, @Nullable HighlightersRecycler infosToRemove, @NotNull final Map<TextRange, RangeMarker> ranges2markersCache, @NotNull SeverityRegistrar severityRegistrar) { int infoStartOffset = info.startOffset; int infoEndOffset = info.endOffset; final int docLength = document.getTextLength(); if (infoEndOffset > docLength) { infoEndOffset = docLength; infoStartOffset = Math.min(infoStartOffset, infoEndOffset); } if (infoEndOffset == infoStartOffset && !info.isAfterEndOfLine()) { if (infoEndOffset == docLength) return; // empty highlighter beyond file boundaries infoEndOffset++; //show something in case of empty highlightinfo } info.setGroup(group); int layer = getLayer(info, severityRegistrar); RangeHighlighterEx highlighter = infosToRemove == null ? 
null : (RangeHighlighterEx)infosToRemove.pickupHighlighterFromGarbageBin(infoStartOffset, infoEndOffset, layer); final TextRange finalInfoRange = new TextRange(infoStartOffset, infoEndOffset); final TextAttributes infoAttributes = info.getTextAttributes(psiFile, colorsScheme); Consumer<RangeHighlighterEx> changeAttributes = finalHighlighter -> { if (infoAttributes != null) { finalHighlighter.setTextAttributes(infoAttributes); } info.setHighlighter(finalHighlighter); finalHighlighter.setAfterEndOfLine(info.isAfterEndOfLine()); Color color = info.getErrorStripeMarkColor(psiFile, colorsScheme); finalHighlighter.setErrorStripeMarkColor(color); if (info != finalHighlighter.getErrorStripeTooltip()) { finalHighlighter.setErrorStripeTooltip(info); } GutterMark renderer = info.getGutterIconRenderer(); finalHighlighter.setGutterIconRenderer((GutterIconRenderer)renderer); ranges2markersCache.put(finalInfoRange, info.getHighlighter()); if (info.quickFixActionRanges != null) { List<Pair<HighlightInfo.IntentionActionDescriptor, RangeMarker>> list = new ArrayList<>(info.quickFixActionRanges.size()); for (Pair<HighlightInfo.IntentionActionDescriptor, TextRange> pair : info.quickFixActionRanges) { TextRange textRange = pair.second; RangeMarker marker = getOrCreate(document, ranges2markersCache, textRange); list.add(Pair.create(pair.first, marker)); } info.quickFixActionMarkers = ContainerUtil.createLockFreeCopyOnWriteList(list); } ProperTextRange fixRange = info.getFixTextRange(); if (finalInfoRange.equals(fixRange)) { info.fixMarker = null; // null means it the same as highlighter' } else { info.fixMarker = getOrCreate(document, ranges2markersCache, fixRange); } }; if (highlighter == null) { highlighter = markup.addRangeHighlighterAndChangeAttributes(infoStartOffset, infoEndOffset, layer, null, HighlighterTargetArea.EXACT_RANGE, false, changeAttributes); if (HighlightInfoType.VISIBLE_IF_FOLDED.contains(info.type)) { highlighter.setVisibleIfFolded(true); } } else { markup.changeAttributesInBatch(highlighter, changeAttributes); } if (infoAttributes != null) { boolean attributesSet = Comparing.equal(infoAttributes, highlighter.getTextAttributes()); assert attributesSet : "Info: " + infoAttributes + "; colorsScheme: " + (colorsScheme == null ? 
"[global]" : colorsScheme.getName()) + "; highlighter:" + highlighter.getTextAttributes(); } } private static int getLayer(@NotNull HighlightInfo info, @NotNull SeverityRegistrar severityRegistrar) { final HighlightSeverity severity = info.getSeverity(); int layer; if (severity == HighlightSeverity.WARNING) { layer = HighlighterLayer.WARNING; } else if (severity == HighlightSeverity.WEAK_WARNING) { layer = HighlighterLayer.WEAK_WARNING; } else if (severityRegistrar.compare(severity, HighlightSeverity.ERROR) >= 0) { layer = HighlighterLayer.ERROR; } else if (severity == HighlightInfoType.INJECTED_FRAGMENT_SEVERITY) { layer = HighlighterLayer.CARET_ROW-1; } else if (severity == HighlightInfoType.ELEMENT_UNDER_CARET_SEVERITY) { layer = HighlighterLayer.ELEMENT_UNDER_CARET; } else { layer = HighlighterLayer.ADDITIONAL_SYNTAX; } return layer; } @NotNull private static RangeMarker getOrCreate(@NotNull Document document, @NotNull Map<TextRange, RangeMarker> ranges2markersCache, @NotNull TextRange textRange) { return ranges2markersCache.computeIfAbsent(textRange, __ -> document.createRangeMarker(textRange)); } private static final Key<Boolean> TYPING_INSIDE_HIGHLIGHTER_OCCURRED = Key.create("TYPING_INSIDE_HIGHLIGHTER_OCCURRED"); static boolean isWhitespaceOptimizationAllowed(@NotNull Document document) { return document.getUserData(TYPING_INSIDE_HIGHLIGHTER_OCCURRED) == null; } private static void disableWhiteSpaceOptimization(@NotNull Document document) { document.putUserData(TYPING_INSIDE_HIGHLIGHTER_OCCURRED, Boolean.TRUE); } private static void clearWhiteSpaceOptimizationFlag(@NotNull Document document) { document.putUserData(TYPING_INSIDE_HIGHLIGHTER_OCCURRED, null); } static void updateHighlightersByTyping(@NotNull Project project, @NotNull DocumentEvent e) { ApplicationManager.getApplication().assertIsDispatchThread(); final Document document = e.getDocument(); if (document instanceof DocumentEx && ((DocumentEx)document).isInBulkUpdate()) return; final MarkupModel markup = DocumentMarkupModel.forDocument(document, project, true); assertMarkupConsistent(markup, project); final int start = e.getOffset() - 1; final int end = start + e.getOldLength(); final List<HighlightInfo> toRemove = new ArrayList<>(); DaemonCodeAnalyzerEx.processHighlights(document, project, null, start, end, info -> { if (!info.needUpdateOnTyping()) return true; RangeHighlighter highlighter = info.getHighlighter(); int highlighterStart = highlighter.getStartOffset(); int highlighterEnd = highlighter.getEndOffset(); if (info.isAfterEndOfLine()) { if (highlighterStart < document.getTextLength()) { highlighterStart += 1; } if (highlighterEnd < document.getTextLength()) { highlighterEnd += 1; } } if (!highlighter.isValid() || start < highlighterEnd && highlighterStart <= end) { toRemove.add(info); } return true; }); for (HighlightInfo info : toRemove) { if (!info.getHighlighter().isValid() || info.type.equals(HighlightInfoType.WRONG_REF)) { info.getHighlighter().dispose(); } } assertMarkupConsistent(markup, project); if (!toRemove.isEmpty()) { disableWhiteSpaceOptimization(document); } } private static void assertMarkupConsistent(@NotNull final MarkupModel markup, @NotNull Project project) { if (!RedBlackTree.VERIFY) { return; } Document document = markup.getDocument(); DaemonCodeAnalyzerEx.processHighlights(document, project, null, 0, document.getTextLength(), info -> { assert ((MarkupModelEx)markup).containsHighlighter(info.getHighlighter()); return true; }); RangeHighlighter[] allHighlighters = markup.getAllHighlighters(); 
for (RangeHighlighter highlighter : allHighlighters) { if (!highlighter.isValid()) continue; HighlightInfo info = HighlightInfo.fromRangeHighlighter(highlighter); if (info == null) continue; boolean contains = !DaemonCodeAnalyzerEx .processHighlights(document, project, null, info.getActualStartOffset(), info.getActualEndOffset(), highlightInfo -> BY_START_OFFSET_NODUPS.compare(highlightInfo, info) != 0); assert contains: info; } } }
IDEA-198593 Errors reported by JSON annotator disappear only on typing
platform/analysis-impl/src/com/intellij/codeInsight/daemon/impl/UpdateHighlightersUtil.java
IDEA-198593 Errors reported by JSON annotator disappear only on typing
<ide><path>latform/analysis-impl/src/com/intellij/codeInsight/daemon/impl/UpdateHighlightersUtil.java <ide> RangeHighlighter highlighter = info.getHighlighter(); <ide> int hiStart = highlighter.getStartOffset(); <ide> int hiEnd = highlighter.getEndOffset(); <del> if (!info.isFromInjection() && hiEnd < document.getTextLength() && (hiEnd <= startOffset || hiStart >= endOffset)) { <add> if (!info.isFromInjection() && hiEnd < document.getTextLength() && (hiEnd != 0 && hiEnd <= startOffset || hiStart >= endOffset)) { <ide> return true; // injections are oblivious to restricting range <ide> } <ide> boolean toRemove = infoSet.contains(info) ||
Java
bsd-3-clause
508869a9e1c61d4f90c0dd7447861bd98c3587a8
0
NCIP/cab2b,NCIP/cab2b,NCIP/cab2b
package edu.wustl.cab2b.client.ui.searchDataWizard; import java.awt.BorderLayout; import java.awt.Color; import java.awt.Dimension; import java.awt.FlowLayout; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.io.BufferedWriter; import java.io.File; import java.io.FileWriter; import java.io.IOException; import java.io.PrintWriter; import javax.swing.JDialog; import javax.swing.JFileChooser; import javax.swing.JScrollPane; import edu.wustl.cab2b.client.ui.controls.Cab2bButton; import edu.wustl.cab2b.client.ui.controls.Cab2bLabel; import edu.wustl.cab2b.client.ui.controls.Cab2bPanel; import edu.wustl.cab2b.client.ui.mainframe.MainFrame; import edu.wustl.cab2b.client.ui.mainframe.NewWelcomePanel; import edu.wustl.cab2b.client.ui.util.WindowUtilities; import edu.wustl.cab2b.common.domain.DCQL; /** * This class displays the DCQL Panel which shows the XML format of DCQL along with function of saving the XML file * * @author gaurav_mehta */ public class ShowDCQLPanel extends Cab2bPanel { /* JDialog in which the Panel is displayed */ private JDialog dialog; // Cab2bLabel in which the entire XML is final private Cab2bLabel xmlTextPane = new Cab2bLabel(); // Cab2bPanel for showing Success and failure messages final Cab2bPanel messagePanel = new Cab2bPanel(); private String dcqlString; /** * @param dcql */ public ShowDCQLPanel(DCQL dcql) { this.dcqlString = dcql.getDcqlQuery(); initGUI(); } private void initGUI() { String xmlText = new XmlParser().parseXml(dcqlString); xmlTextPane.setText(xmlText); xmlTextPane.setBackground(Color.WHITE); Cab2bPanel xmlPanel = new Cab2bPanel(); xmlPanel.add(xmlTextPane); xmlPanel.setBackground(Color.WHITE); JScrollPane scrollPane = new JScrollPane(); scrollPane.getViewport().add(xmlPanel); scrollPane.getViewport().setBackground(Color.WHITE); Cab2bPanel xmlNavigationPanel = new Cab2bPanel(); Cab2bButton exportButton = new Cab2bButton("Export"); Cab2bButton cancelButton = new Cab2bButton("Cancel"); // Action Listener for Export Button exportButton.addActionListener(new ExportButtonListner()); // Action Listener for Cancel Button cancelButton.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent actionEvent) { dialog.dispose(); } }); Cab2bPanel buttonPanel = new Cab2bPanel(); FlowLayout flowLayout = new FlowLayout(FlowLayout.RIGHT); buttonPanel.setLayout(flowLayout); buttonPanel.add(exportButton); buttonPanel.add(cancelButton); buttonPanel.setBackground(new Color(240, 240, 240)); xmlNavigationPanel.add("br left", messagePanel); xmlNavigationPanel.add("hfill", buttonPanel); xmlNavigationPanel.setPreferredSize(new Dimension(880, 50)); xmlNavigationPanel.setBackground(new Color(240, 240, 240)); setLayout(new BorderLayout()); add(scrollPane, BorderLayout.CENTER); add(xmlNavigationPanel, BorderLayout.SOUTH); } /** * JDialog for showing DCQL XML Details Panel * * @return */ public JDialog showInDialog() { Dimension dimension = MainFrame.getScreenDimesion(); dialog = WindowUtilities.setInDialog(NewWelcomePanel.getMainFrame(), this, "DCQL Xml", new Dimension( (int) (dimension.width * 0.77), (int) (dimension.height * 0.65)), true, false); dialog.setVisible(true); return dialog; } private boolean writeFile(File file, String dataString) { try { PrintWriter out = new PrintWriter(new BufferedWriter(new FileWriter(file))); out.print(dataString); out.flush(); out.close(); } catch (IOException e) { return false; } return true; } /** * Action listener class for Export Button. 
It saves the XML file to user defined location in user defined format */ class ExportButtonListner implements ActionListener { public void actionPerformed(ActionEvent actionEvent) { JFileChooser fileChooser = new JFileChooser(); // A call to JFileChooser's ShowSaveDialog PopUp fileChooser.showSaveDialog(NewWelcomePanel.getMainFrame()); File file = fileChooser.getSelectedFile(); // Function call for writing the File and saving it boolean saveReturnValue = writeFile(file, dcqlString); if (saveReturnValue == true) { Cab2bLabel successResultLabel = new Cab2bLabel("File Saved Successfully"); successResultLabel.setForeground(Color.GREEN); messagePanel.add(successResultLabel); messagePanel.repaint(); } else { Cab2bLabel failureResultLabel = new Cab2bLabel("File Could not be Saved"); failureResultLabel.setForeground(Color.RED); messagePanel.add(failureResultLabel); messagePanel.repaint(); } } } }
source/client/main/edu/wustl/cab2b/client/ui/searchDataWizard/ShowDCQLPanel.java
package edu.wustl.cab2b.client.ui.searchDataWizard; import java.awt.BorderLayout; import java.awt.Color; import java.awt.Dimension; import java.awt.FlowLayout; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.io.BufferedWriter; import java.io.File; import java.io.FileWriter; import java.io.IOException; import java.io.PrintWriter; import javax.swing.JDialog; import javax.swing.JFileChooser; import javax.swing.JScrollPane; import edu.wustl.cab2b.client.ui.controls.Cab2bButton; import edu.wustl.cab2b.client.ui.controls.Cab2bLabel; import edu.wustl.cab2b.client.ui.controls.Cab2bPanel; import edu.wustl.cab2b.client.ui.mainframe.MainFrame; import edu.wustl.cab2b.client.ui.mainframe.NewWelcomePanel; import edu.wustl.cab2b.client.ui.util.WindowUtilities; import edu.wustl.cab2b.common.domain.DCQL; /** * This class displays the DCQL Panel which shows the XML format of DCQL along with function of saving the XML file * * @author gaurav_mehta */ public class ShowDCQLPanel extends Cab2bPanel { /* JDialog in which the Panel is displayed */ private JDialog dialog; // Cab2bLabel in which the entire XML is final private Cab2bLabel xmlTextPane = new Cab2bLabel(); // Cab2bPanel for showing Success and failure messages final Cab2bPanel messagePanel = new Cab2bPanel(); /** * @param dcql */ public ShowDCQLPanel(DCQL dcql) { initGUI(dcql); } private void initGUI(DCQL dcql) { String xmlText = new XmlParser().parseXml(dcql.getDcqlQuery()); xmlTextPane.setText(xmlText); xmlTextPane.setBackground(Color.WHITE); Cab2bPanel xmlPanel = new Cab2bPanel(); xmlPanel.add(xmlTextPane); xmlPanel.setBackground(Color.WHITE); JScrollPane scrollPane = new JScrollPane(); scrollPane.getViewport().add(xmlPanel); scrollPane.getViewport().setBackground(Color.WHITE); Cab2bPanel xmlNavigationPanel = new Cab2bPanel(); Cab2bButton exportButton = new Cab2bButton("Export"); Cab2bButton cancelButton = new Cab2bButton("Cancel"); // Action Listener for Export Button exportButton.addActionListener(new ExportButtonListner()); // Action Listener for Cancel Button cancelButton.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent actionEvent) { dialog.dispose(); } }); Cab2bPanel buttonPanel = new Cab2bPanel(); FlowLayout flowLayout = new FlowLayout(FlowLayout.RIGHT); buttonPanel.setLayout(flowLayout); buttonPanel.add(exportButton); buttonPanel.add(cancelButton); buttonPanel.setBackground(new Color(240, 240, 240)); xmlNavigationPanel.add("br left", messagePanel); xmlNavigationPanel.add("hfill", buttonPanel); xmlNavigationPanel.setPreferredSize(new Dimension(880, 50)); xmlNavigationPanel.setBackground(new Color(240, 240, 240)); setLayout(new BorderLayout()); add(scrollPane, BorderLayout.CENTER); add(xmlNavigationPanel, BorderLayout.SOUTH); } /** * JDialog for showing DCQL XML Details Panel * * @return */ public JDialog showInDialog() { Dimension dimension = MainFrame.getScreenDimesion(); dialog = WindowUtilities.setInDialog(NewWelcomePanel.getMainFrame(), this, "DCQL Xml", new Dimension( (int) (dimension.width * 0.77), (int) (dimension.height * 0.65)), true, false); dialog.setVisible(true); return dialog; } private boolean writeFile(File file, String dataString) { try { PrintWriter out = new PrintWriter(new BufferedWriter(new FileWriter(file))); out.print(dataString); out.flush(); out.close(); } catch (IOException e) { return false; } return true; } /** * Action listener class for Export Button. 
It saves the XML file to user defined location in user defined format */ class ExportButtonListner implements ActionListener { public void actionPerformed(ActionEvent actionEvent) { JFileChooser fileChooser = new JFileChooser(); // A call to JFileChooser's ShowSaveDialog PopUp fileChooser.showSaveDialog(NewWelcomePanel.getMainFrame()); File file = fileChooser.getSelectedFile(); // Function call for writing the File and saving it boolean saveReturnValue = writeFile(file, xmlTextPane.getText()); if (saveReturnValue == true) { Cab2bLabel successResultLabel = new Cab2bLabel("File Saved Successfully"); successResultLabel.setForeground(Color.GREEN); messagePanel.add(successResultLabel); messagePanel.repaint(); } else { Cab2bLabel failureResultLabel = new Cab2bLabel("File Could not be Saved"); failureResultLabel.setForeground(Color.RED); messagePanel.add(failureResultLabel); messagePanel.repaint(); } } } }
Fix for Bug # 10563
source/client/main/edu/wustl/cab2b/client/ui/searchDataWizard/ShowDCQLPanel.java
Fix for Bug # 10563
<ide><path>ource/client/main/edu/wustl/cab2b/client/ui/searchDataWizard/ShowDCQLPanel.java <ide> <ide> // Cab2bPanel for showing Success and failure messages <ide> final Cab2bPanel messagePanel = new Cab2bPanel(); <add> <add> private String dcqlString; <ide> <ide> /** <ide> * @param dcql <ide> */ <ide> public ShowDCQLPanel(DCQL dcql) { <del> initGUI(dcql); <add> this.dcqlString = dcql.getDcqlQuery(); <add> initGUI(); <ide> } <ide> <del> private void initGUI(DCQL dcql) { <del> String xmlText = new XmlParser().parseXml(dcql.getDcqlQuery()); <add> private void initGUI() { <add> String xmlText = new XmlParser().parseXml(dcqlString); <ide> <ide> xmlTextPane.setText(xmlText); <ide> xmlTextPane.setBackground(Color.WHITE); <ide> * Action listener class for Export Button. It saves the XML file to user defined location in user defined format <ide> */ <ide> class ExportButtonListner implements ActionListener { <add> <ide> public void actionPerformed(ActionEvent actionEvent) { <ide> JFileChooser fileChooser = new JFileChooser(); <ide> <ide> <ide> File file = fileChooser.getSelectedFile(); <ide> // Function call for writing the File and saving it <del> boolean saveReturnValue = writeFile(file, xmlTextPane.getText()); <add> boolean saveReturnValue = writeFile(file, dcqlString); <ide> if (saveReturnValue == true) { <ide> Cab2bLabel successResultLabel = new Cab2bLabel("File Saved Successfully"); <ide> successResultLabel.setForeground(Color.GREEN);
Java
bsd-3-clause
d9a5f8b141245b6d489995f667f343b4f49e449e
0
tinkerpop/gremlin,ccagnoli/gremlin,cesarmarinhorj/gremlin,cesarmarinhorj/gremlin,tinkerpop/gremlin,ccagnoli/gremlin,samanalysis/gremlin,samanalysis/gremlin
package com.tinkerpop.gremlin.pipes;

import com.tinkerpop.pipes.AbstractPipe;
import com.tinkerpop.pipes.ExpandableIterator;
import com.tinkerpop.pipes.Pipe;
import groovy.lang.Closure;

import java.util.Iterator;
import java.util.List;

/**
 * @author Marko A. Rodriguez (http://markorodriguez.com)
 */
public class LoopPipe<S> extends AbstractPipe<S, S> {

    private final Closure doLoopClosure;
    private final Pipe<S, S> toLoopPipe;
    private ExpandableIterator<S> expando;

    public LoopPipe(final Pipe<S, S> toLoopPipe, final Closure doLoopClosure) {
        this.toLoopPipe = toLoopPipe;
        this.doLoopClosure = doLoopClosure;
    }

    protected S processNextStart() {
        while (true) {
            final S e = this.toLoopPipe.next();
            if ((Boolean) doLoopClosure.call(e)) {
                this.expando.add(e);
            } else {
                return e;
            }
        }
    }

    public void setStarts(final Iterator<S> iterator) {
        this.expando = new ExpandableIterator<S>(iterator);
        this.toLoopPipe.setStarts(this.expando);
    }

    public String toString() {
        return super.toString() + "<" + this.toLoopPipe + ">";
    }

    public List getPath() {
        return this.toLoopPipe.getPath();
    }
}
src/main/java/com/tinkerpop/gremlin/pipes/LoopPipe.java
package com.tinkerpop.gremlin.pipes;

import com.tinkerpop.pipes.AbstractPipe;
import com.tinkerpop.pipes.ExpandableIterator;
import com.tinkerpop.pipes.Pipe;
import groovy.lang.Closure;

import java.util.Iterator;
import java.util.List;

/**
 * @author Marko A. Rodriguez (http://markorodriguez.com)
 */
public class LoopPipe<S> extends AbstractPipe<S, S> {

    private final Closure doLoopClosure;
    private final Pipe<S, S> toLoopPipe;
    private ExpandableIterator<S> expando;

    public LoopPipe(final Pipe<S, S> toLoopPipe, final Closure doLoopClosure) {
        this.toLoopPipe = toLoopPipe;
        this.doLoopClosure = doLoopClosure;
    }

    protected S processNextStart() {
        while (true) {
            final S e = this.toLoopPipe.next();
            if ((Boolean) doLoopClosure.call(e)) {
                this.expando.add(e);
            } else {
                return e;
            }
        }
    }

    public void setStarts(final Iterator<S> iterator) {
        this.expando = new ExpandableIterator<S>(iterator);
        this.toLoopPipe.setStarts(this.expando);
    }

    public String toString() {
        return super.toString() + "[" + this.toLoopPipe + "]";
    }

    public List getPath() {
        return this.toLoopPipe.getPath();
    }
}
fixed toString() of LoopPipe.
src/main/java/com/tinkerpop/gremlin/pipes/LoopPipe.java
fixed toString() of LoopPipe.
<ide><path>rc/main/java/com/tinkerpop/gremlin/pipes/LoopPipe.java <ide> } <ide> <ide> public String toString() { <del> return super.toString() + "[" + this.toLoopPipe + "]"; <add> return super.toString() + "<" + this.toLoopPipe + ">"; <ide> } <ide> <ide> public List getPath() {
Java
mit
eb608ac4a4017128804782f45b6ef742b989b622
0
RokKos/FRI_Programiranje,RokKos/FRI_Programiranje,RokKos/FRI_Programiranje,RokKos/FRI_Programiranje,RokKos/FRI_Programiranje,RokKos/FRI_Programiranje,RokKos/FRI_Programiranje
/** * @author sliva */ package compiler.phases.abstr; import java.util.*; import compiler.common.report.*; import compiler.data.dertree.*; import compiler.data.dertree.DerNode.Nont; import compiler.data.dertree.visitor.*; import compiler.data.symbol.Symbol.Term; import compiler.data.abstree.*; import compiler.data.abstree.AbsBinExpr.Oper; /** * Transforms a derivation tree to an abstract syntax tree. * * @author sliva */ public class AbsTreeConstructor implements DerVisitor<AbsTree, AbsTree> { private final String PTR_NODE = "Expected PTR node"; private final String ARR_NODE = "Expected ARR node"; private final String GOT_STR = " got: "; private final String TOO_MANY_NODES = "There are zore or more than 3 nodes"; private final String DECL_NODE = "Declaration node doesn't start with TYP, FUN or VAR"; private final String WRONG_BINARY_NODE = "This binary operator doesn't exist."; private final Location kNULL_LOCATION = new Location(0, 0); @Override public AbsTree visit(DerLeaf leaf, AbsTree visArg) { throw new Report.InternalError(); } @Override public AbsTree visit(DerNode node, AbsTree visArg) { switch (node.label) { case Source: { AbsDecls decls = (AbsDecls) node.subtree(0).accept(this, null); return new AbsSource(decls, decls); } case Decls: case DeclsRest: { if (node.numSubtrees() == 0) return null; Vector<AbsDecl> allDecls = new Vector<AbsDecl>(); AbsDecl decl = (AbsDecl) node.subtree(0).accept(this, null); allDecls.add(decl); AbsDecls decls = (AbsDecls) node.subtree(1).accept(this, null); if (decls != null) allDecls.addAll(decls.decls()); return new AbsDecls(new Location(decl, decls == null ? decl : decls), allDecls); } case Decl: { DerLeaf typeOfDecleration = (DerLeaf) node.subtree(0); switch (typeOfDecleration.symb.token) { case VAR: { AbsParDecl parDecl = (AbsParDecl) node.subtree(1).accept(this, null); Location loc = new Location(typeOfDecleration, parDecl); return new AbsVarDecl(loc, parDecl.name, parDecl.type); } case TYP: { AbsParDecl parDecl = (AbsParDecl) node.subtree(1).accept(this, null); Location loc = new Location(typeOfDecleration, parDecl); return new AbsTypDecl(loc, parDecl.name, parDecl.type); } case FUN: { DerLeaf funName = (DerLeaf) node.subtree(1); AbsParDecls parDecls = (AbsParDecls) node.subtree(2).accept(this, null); AbsType type = (AbsType) node.subtree(3).accept(this, null); AbsExpr expr = (AbsExpr) node.subtree(4).accept(this, null); if (expr.location().equals(kNULL_LOCATION)) { Location loc = new Location(typeOfDecleration, type); return new AbsFunDecl(loc, funName.symb.lexeme, parDecls, type); } else { Location loc = new Location(typeOfDecleration, expr); return new AbsFunDef(loc, funName.symb.lexeme, parDecls, type, expr); } } default: throw new Report.Error(typeOfDecleration.location(), DECL_NODE + GOT_STR + typeOfDecleration.symb.token.toString()); } } case ParDecl: { return DeformParDecl(node); } case Type: { if (node.numSubtrees() == 1) { // This is only check for ( Type ) if (node.subtree(0) instanceof DerNode) { return node.subtree(0).accept(this, null); } else if (node.subtree(0) instanceof DerLeaf) { DerLeaf primitiveTypeNode = (DerLeaf) node.subtree(0); AbsAtomType.Type primitiveType; switch (primitiveTypeNode.symb.token) { case VOID: primitiveType = AbsAtomType.Type.VOID; break; case BOOL: primitiveType = AbsAtomType.Type.BOOL; break; case CHAR: primitiveType = AbsAtomType.Type.CHAR; break; case INT: primitiveType = AbsAtomType.Type.INT; break; default: // TODO: Error (Maybe) return new AbsTypName(primitiveTypeNode.location(), 
primitiveTypeNode.symb.lexeme); } return new AbsAtomType(primitiveTypeNode.location(), primitiveType); } } else if (node.numSubtrees() == 2) { DerLeaf ptr = (DerLeaf) node.subtree(0); if (ptr.symb.token == Term.PTR) { AbsType subType = (AbsType) node.subtree(1).accept(this, null); Location loc = new Location(ptr, subType); return new AbsPtrType(loc, subType); } else if (ptr.symb.token == Term.REC) { AbsCompDecls compDecls = (AbsCompDecls) node.subtree(1).accept(this, null); Location loc = new Location(ptr, compDecls); return new AbsRecType(loc, compDecls); } else { throw new Report.Error(ptr.location(), PTR_NODE + GOT_STR + ptr.symb.token.toString()); } } else if (node.numSubtrees() == 3) { DerLeaf arr = (DerLeaf) node.subtree(0); if (arr.symb.token != Term.ARR) { throw new Report.Error(arr.location(), ARR_NODE + GOT_STR + arr.symb.token.toString()); } AbsExpr length = (AbsExpr) node.subtree(1).accept(this, null); AbsType elemType = (AbsType) node.subtree(2).accept(this, null); Location loc = new Location(arr, elemType); return new AbsArrType(loc, length, elemType); } else { throw new Report.Error(node.location(), TOO_MANY_NODES + GOT_STR + node.numSubtrees()); } } case ParDecls: { return DeformParDecls(node); } case ParDeclsRest: { return DeformParDecls(node); } case BodyEps: { if (node.numSubtrees() == 0) { // Hacky try to find other expr that is not abstrac and has less field return Epsilon(); } return node.subtree(1).accept(this, null); } case CompDecls: case CompDeclsRest: { if (node.numSubtrees() == 0) { return null; } Vector<AbsCompDecl> allCompDecls = new Vector<AbsCompDecl>(); AbsCompDecl compDecl = (AbsCompDecl) node.subtree(0).accept(this, null); allCompDecls.add(compDecl); AbsCompDecls compDecls = (AbsCompDecls) node.subtree(1).accept(this, null); if (compDecls != null) { allCompDecls.addAll(compDecls.compDecls()); } Location loc = new Location(compDecl, compDecls == null ? 
compDecl : compDecls); return new AbsCompDecls(loc, allCompDecls); } case CompDecl: { return DeformCompDecl(node); } case Expr: case DisjExpr: case ConjExpr: case RelExpr: case AddExpr: case MulExpr: { return ExpressionTransform(node, visArg); } case DisjExprRest: case ConjExprRest: case AddExprRest: case MulExprRest: { if (node.numSubtrees() == 0) { return visArg; } DerLeaf operatorNode = (DerLeaf) node.subtree(0); AbsBinExpr.Oper oper = kTermToBinOper.get(operatorNode.symb.token); if (oper == null) { throw new Report.Error(node.location(), WRONG_BINARY_NODE + GOT_STR + node.numSubtrees()); } AbsExpr leftOperand = (AbsExpr) node.subtree(1).accept(this, null); Location loc = new Location(visArg, leftOperand); AbsBinExpr binExpr = new AbsBinExpr(loc, oper, (AbsExpr) visArg, leftOperand); AbsExpr rightOperand = (AbsExpr) node.subtree(2).accept(this, binExpr); // rightOperand.location()); return rightOperand; } case RelExprRest: { if (node.numSubtrees() == 0) { return visArg; } DerLeaf operatorNode = (DerLeaf) node.subtree(0); AbsBinExpr.Oper oper = kTermToBinOper.get(operatorNode.symb.token); if (oper == null) { throw new Report.Error(node.location(), WRONG_BINARY_NODE + GOT_STR + node.numSubtrees()); } AbsExpr leftOperand = (AbsExpr) node.subtree(1).accept(this, null); Location loc = new Location(visArg, leftOperand); return new AbsBinExpr(loc, oper, (AbsExpr) visArg, leftOperand); } case PrefExpr: { if (node.subtree(0) instanceof DerLeaf) { DerLeaf operatorNode = (DerLeaf) node.subtree(0); AbsUnExpr.Oper oper = kTermToUnarOper.get(operatorNode.symb.token); if (oper != null) { AbsExpr subExpr = (AbsExpr) node.subtree(1).accept(this, null); Location loc = new Location(operatorNode, subExpr); return new AbsUnExpr(loc, oper, subExpr); } if (operatorNode.symb.token == Term.NEW) { AbsType type = (AbsType) node.subtree(1).accept(this, null); Location loc = new Location(operatorNode, type); return new AbsNewExpr(loc, type); } if (operatorNode.symb.token == Term.DEL) { AbsExpr expr = (AbsExpr) node.subtree(1).accept(this, null); Location loc = new Location(operatorNode, expr); return new AbsDelExpr(loc, expr); } } else { DerNode exprNode = (DerNode) node.subtree(0); AbsExpr expr = (AbsExpr) exprNode.accept(this, null); if (exprNode.label == Nont.Expr) { return node.subtree(1).accept(this, expr); } else if (exprNode.label == Nont.PstfExpr) { return node.subtree(1).accept(this, expr); } } } case PstfExprRest: { if (node.numSubtrees() == 0) { return visArg; } if (node.subtree(0) instanceof DerLeaf) { DerLeaf varNode = (DerLeaf) node.subtree(0); AbsVarName varName = new AbsVarName(varNode, varNode.symb.lexeme); Location loc = new Location(visArg, varName); AbsRecExpr recordExpr = new AbsRecExpr(loc, (AbsExpr) visArg, varName); return node.subtree(1).accept(this, recordExpr); } else { AbsExpr index = (AbsExpr) node.subtree(0).accept(this, null); Location loc = new Location(visArg, index); AbsArrExpr arrExpr = new AbsArrExpr(loc, (AbsExpr) visArg, index); return node.subtree(1).accept(this, arrExpr); } } case PstfExpr: { return node.subtree(0).accept(this, null); } case AtomExpr: { if (node.numSubtrees() == 1) { return node.subtree(0).accept(this, null); } if (node.numSubtrees() == 2) { boolean isNull = node.subtree(1).accept(this, null) instanceof AbsAtomExpr && node.subtree(1).accept(this, null).location().equals(kNULL_LOCATION); if (isNull) { DerLeaf varName = (DerLeaf) node.subtree(0); Location loc = new Location(varName, varName); return new AbsVarName(loc, varName.symb.lexeme); } else { AbsArgs 
funArgs = (AbsArgs) node.subtree(1).accept(this, null); DerLeaf funName = (DerLeaf) node.subtree(0); Location loc = new Location(funName, funArgs.args().size() != 0 ? funArgs : funName); return new AbsFunName(loc, funName.symb.lexeme, funArgs); } } if (node.numSubtrees() == 3) { AbsStmts statements = (AbsStmts) node.subtree(0).accept(this, null); AbsExpr expr = (AbsExpr) node.subtree(1).accept(this, null); AbsDecls decls = (AbsDecls) node.subtree(2).accept(this, null); if (decls == null) { Location loc = new Location(statements, expr); return new AbsBlockExpr(loc, EpsilonDecls(), statements, expr); } Location loc = new Location(statements, decls); return new AbsBlockExpr(loc, decls, statements, expr); } } case Literal: { DerLeaf literal = (DerLeaf) node.subtree(0); return new AbsAtomExpr(literal, kTermToLitType.get(literal.symb.token), literal.symb.lexeme); } case CastEps: { if (node.numSubtrees() == 0) { return visArg; } AbsType type = (AbsType) node.subtree(0).accept(this, null); // Double check this for location Location loc = new Location(visArg, type); return new AbsCastExpr(loc, (AbsExpr) visArg, type); } case CallEps: if (node.numSubtrees() == 0) { return Epsilon(); } return node.subtree(0).accept(this, null); case Args: { if (node.numSubtrees() == 0) { return EpsilonArgs(); } return node.subtree(0).accept(this, null); } case ArgsEps: case ArgsRest: { if (node.numSubtrees() == 0) { return null; } Vector<AbsExpr> allExpr = new Vector<AbsExpr>(); AbsExpr expr = (AbsExpr) node.subtree(0).accept(this, null); allExpr.add(expr); AbsArgs args = (AbsArgs) node.subtree(1).accept(this, null); if (args != null) { allExpr.addAll(args.args()); } Location loc = new Location(expr, args == null ? expr : args); return new AbsArgs(loc, allExpr); } case Stmts: case StmtsRest: { if (node.numSubtrees() == 0) { return null; } Vector<AbsStmt> allStmts = new Vector<AbsStmt>(); AbsStmt stmt = (AbsStmt) node.subtree(0).accept(this, null); allStmts.add(stmt); AbsStmts stmts = (AbsStmts) node.subtree(1).accept(this, null); if (stmts != null) { allStmts.addAll(stmts.stmts()); } Location loc = new Location(stmt, stmts == null ? 
stmt : stmts); return new AbsStmts(loc, allStmts); } case Stmt: { if (node.numSubtrees() == 2) { AbsExpr expr = (AbsExpr) node.subtree(0).accept(this, null); return node.subtree(1).accept(this, expr); } if (node.numSubtrees() == 3) { AbsExpr condition = (AbsExpr) node.subtree(1).accept(this, null); AbsStmts doStatements = (AbsStmts) node.subtree(2).accept(this, null); Location loc = new Location((DerLeaf) node.subtree(0), doStatements); return new AbsWhileStmt(loc, condition, doStatements); } if (node.numSubtrees() == 4) { AbsExpr condition = (AbsExpr) node.subtree(1).accept(this, null); AbsStmts thenStatements = (AbsStmts) node.subtree(2).accept(this, null); AbsStmts elseStatements = (AbsStmts) node.subtree(3).accept(this, null); if (elseStatements == null) { Location loc = new Location((DerLeaf) node.subtree(0), thenStatements); return new AbsIfStmt(loc, condition, thenStatements, EpsilonStmts()); } Location loc = new Location((DerLeaf) node.subtree(0), elseStatements); return new AbsIfStmt(loc, condition, thenStatements, elseStatements); } } case AssignEps: { if (node.numSubtrees() == 0) { return new AbsExprStmt(visArg, (AbsExpr) visArg); } AbsExpr rightSide = (AbsExpr) node.subtree(0).accept(this, null); Location loc = new Location(visArg, rightSide); return new AbsAssignStmt(loc, (AbsExpr) visArg, rightSide); } case ElseEps: case WhereEps: { if (node.numSubtrees() == 0) { return null; } return node.subtree(0).accept(this, null); } } // End Switch return visArg; } // Helper function private AbsTree DeformParDecls(DerNode node) { if (node.numSubtrees() == 0) { return new AbsParDecls(kNULL_LOCATION, new Vector<AbsParDecl>()); } Vector<AbsParDecl> allParDecls = new Vector<AbsParDecl>(); AbsParDecl parDecl = (AbsParDecl) node.subtree(0).accept(this, null); allParDecls.add(parDecl); AbsParDecls parDecls = (AbsParDecls) node.subtree(1).accept(this, null); if (parDecls != null) { allParDecls.addAll(parDecls.parDecls()); } Location loc = new Location(parDecl, parDecls.equals(kNULL_LOCATION) ? 
parDecl : parDecls); return new AbsParDecls(loc, allParDecls); } private AbsTree DeformParDecl(DerNode node) { DerLeaf identifier = (DerLeaf) node.subtree(0); AbsType type = (AbsType) node.subtree(1).accept(this, null); Location loc = new Location(identifier, type); return new AbsParDecl(loc, identifier.symb.lexeme, type); } // Same function just other class private AbsTree DeformCompDecl(DerNode node) { DerLeaf identifier = (DerLeaf) node.subtree(0); AbsType type = (AbsType) node.subtree(1).accept(this, null); Location loc = new Location(identifier, type); return new AbsCompDecl(loc, identifier.symb.lexeme, type); } private AbsTree ExpressionTransform(DerNode node, AbsTree visArg) { if (node.numSubtrees() == 0) { return visArg; } if (node.numSubtrees() == 1) { return node.subtree(0).accept(this, null); } AbsExpr leftOperand = (AbsExpr) node.subtree(0).accept(this, null); AbsExpr rightOperand = (AbsExpr) node.subtree(1).accept(this, leftOperand); return rightOperand; } private AbsTree Epsilon() { // Hacky try to find other expr that is not abstrac and has less field return new AbsAtomExpr(kNULL_LOCATION, AbsAtomExpr.Type.VOID, ""); } private AbsArgs EpsilonArgs() { return new AbsArgs(kNULL_LOCATION, new Vector<AbsExpr>()); } private AbsDecls EpsilonDecls() { return new AbsDecls(kNULL_LOCATION, new Vector<AbsDecl>()); } private AbsStmts EpsilonStmts() { return new AbsStmts(kNULL_LOCATION, new Vector<AbsStmt>()); } private Map<Term, AbsBinExpr.Oper> kTermToBinOper = new HashMap<Term, AbsBinExpr.Oper>() { { put(Term.IOR, AbsBinExpr.Oper.IOR); put(Term.XOR, AbsBinExpr.Oper.XOR); put(Term.AND, AbsBinExpr.Oper.AND); put(Term.EQU, AbsBinExpr.Oper.EQU); put(Term.NEQ, AbsBinExpr.Oper.NEQ); put(Term.LTH, AbsBinExpr.Oper.LTH); put(Term.GTH, AbsBinExpr.Oper.GTH); put(Term.LEQ, AbsBinExpr.Oper.LEQ); put(Term.GEQ, AbsBinExpr.Oper.GEQ); put(Term.ADD, AbsBinExpr.Oper.ADD); put(Term.SUB, AbsBinExpr.Oper.SUB); put(Term.MUL, AbsBinExpr.Oper.MUL); put(Term.DIV, AbsBinExpr.Oper.DIV); put(Term.MOD, AbsBinExpr.Oper.MOD); } }; private Map<Term, AbsUnExpr.Oper> kTermToUnarOper = new HashMap<Term, AbsUnExpr.Oper>() { { put(Term.NOT, AbsUnExpr.Oper.NOT); put(Term.ADDR, AbsUnExpr.Oper.ADDR); put(Term.DATA, AbsUnExpr.Oper.DATA); put(Term.ADD, AbsUnExpr.Oper.ADD); put(Term.SUB, AbsUnExpr.Oper.SUB); } }; private Map<Term, AbsAtomExpr.Type> kTermToLitType = new HashMap<Term, AbsAtomExpr.Type>() { { put(Term.VOIDCONST, AbsAtomExpr.Type.VOID); put(Term.BOOLCONST, AbsAtomExpr.Type.BOOL); put(Term.PTRCONST, AbsAtomExpr.Type.PTR); put(Term.INTCONST, AbsAtomExpr.Type.INT); put(Term.CHARCONST, AbsAtomExpr.Type.CHAR); put(Term.STRCONST, AbsAtomExpr.Type.STR); } }; }
PREV/prev/srcs/compiler/phases/abstr/AbsTreeConstructor.java
/** * @author sliva */ package compiler.phases.abstr; import java.util.*; import compiler.common.report.*; import compiler.data.dertree.*; import compiler.data.dertree.DerNode.Nont; import compiler.data.dertree.visitor.*; import compiler.data.symbol.Symbol.Term; import compiler.data.abstree.*; import compiler.data.abstree.AbsBinExpr.Oper; /** * Transforms a derivation tree to an abstract syntax tree. * * @author sliva */ public class AbsTreeConstructor implements DerVisitor<AbsTree, AbsTree> { private final String PTR_NODE = "Expected PTR node"; private final String ARR_NODE = "Expected ARR node"; private final String GOT_STR = " got: "; private final String TOO_MANY_NODES = "There are zore or more than 3 nodes"; private final String DECL_NODE = "Declaration node doesn't start with TYP, FUN or VAR"; private final String WRONG_BINARY_NODE = "This binary operator doesn't exist."; private final Location kNULL_LOCATION = new Location(0, 0); @Override public AbsTree visit(DerLeaf leaf, AbsTree visArg) { throw new Report.InternalError(); } @Override public AbsTree visit(DerNode node, AbsTree visArg) { switch (node.label) { case Source: { AbsDecls decls = (AbsDecls) node.subtree(0).accept(this, null); return new AbsSource(decls, decls); } case Decls: case DeclsRest: { if (node.numSubtrees() == 0) return null; Vector<AbsDecl> allDecls = new Vector<AbsDecl>(); AbsDecl decl = (AbsDecl) node.subtree(0).accept(this, null); allDecls.add(decl); AbsDecls decls = (AbsDecls) node.subtree(1).accept(this, null); if (decls != null) allDecls.addAll(decls.decls()); return new AbsDecls(new Location(decl, decls == null ? decl : decls), allDecls); } case Decl: { DerLeaf typeOfDecleration = (DerLeaf) node.subtree(0); switch (typeOfDecleration.symb.token) { case VAR: { AbsParDecl parDecl = (AbsParDecl) node.subtree(1).accept(this, null); Location loc = new Location(typeOfDecleration, parDecl); return new AbsVarDecl(loc, parDecl.name, parDecl.type); } case TYP: { AbsParDecl parDecl = (AbsParDecl) node.subtree(1).accept(this, null); Location loc = new Location(typeOfDecleration, parDecl); return new AbsTypDecl(loc, parDecl.name, parDecl.type); } case FUN: { DerLeaf funName = (DerLeaf) node.subtree(1); AbsParDecls parDecls = (AbsParDecls) node.subtree(2).accept(this, null); AbsType type = (AbsType) node.subtree(3).accept(this, null); AbsExpr expr = (AbsExpr) node.subtree(4).accept(this, null); if (expr.location().equals(kNULL_LOCATION)) { Location loc = new Location(typeOfDecleration, type); return new AbsFunDecl(loc, funName.symb.lexeme, parDecls, type); } else { Location loc = new Location(typeOfDecleration, expr); return new AbsFunDef(loc, funName.symb.lexeme, parDecls, type, expr); } } default: throw new Report.Error(typeOfDecleration.location(), DECL_NODE + GOT_STR + typeOfDecleration.symb.token.toString()); } } case ParDecl: { return DeformParDecl(node); } case Type: { if (node.numSubtrees() == 1) { // This is only check for ( Type ) if (node.subtree(0) instanceof DerNode) { return node.subtree(0).accept(this, null); } else if (node.subtree(0) instanceof DerLeaf) { DerLeaf primitiveTypeNode = (DerLeaf) node.subtree(0); AbsAtomType.Type primitiveType; switch (primitiveTypeNode.symb.token) { case VOID: primitiveType = AbsAtomType.Type.VOID; break; case BOOL: primitiveType = AbsAtomType.Type.BOOL; break; case CHAR: primitiveType = AbsAtomType.Type.CHAR; break; case INT: primitiveType = AbsAtomType.Type.INT; break; default: // TODO: Error (Maybe) return new AbsTypName(primitiveTypeNode.location(), 
primitiveTypeNode.symb.lexeme); } return new AbsAtomType(primitiveTypeNode.location(), primitiveType); } } else if (node.numSubtrees() == 2) { DerLeaf ptr = (DerLeaf) node.subtree(0); if (ptr.symb.token == Term.PTR) { AbsType subType = (AbsType) node.subtree(1).accept(this, null); Location loc = new Location(ptr, subType); return new AbsPtrType(loc, subType); } else if (ptr.symb.token == Term.REC) { AbsCompDecls compDecls = (AbsCompDecls) node.subtree(1).accept(this, null); Location loc = new Location(ptr, compDecls); return new AbsRecType(loc, compDecls); } else { throw new Report.Error(ptr.location(), PTR_NODE + GOT_STR + ptr.symb.token.toString()); } } else if (node.numSubtrees() == 3) { DerLeaf arr = (DerLeaf) node.subtree(0); if (arr.symb.token != Term.ARR) { throw new Report.Error(arr.location(), ARR_NODE + GOT_STR + arr.symb.token.toString()); } AbsExpr length = (AbsExpr) node.subtree(1).accept(this, null); AbsType elemType = (AbsType) node.subtree(2).accept(this, null); Location loc = new Location(arr, elemType); return new AbsArrType(loc, length, elemType); } else { throw new Report.Error(node.location(), TOO_MANY_NODES + GOT_STR + node.numSubtrees()); } } case ParDecls: { return DeformParDecls(node); } case ParDeclsRest: { return DeformParDecls(node); } case BodyEps: { if (node.numSubtrees() == 0) { // Hacky try to find other expr that is not abstrac and has less field return Epsilon(); } return node.subtree(1).accept(this, null); } case CompDecls: case CompDeclsRest: { if (node.numSubtrees() == 0) { return null; } Vector<AbsCompDecl> allCompDecls = new Vector<AbsCompDecl>(); AbsCompDecl compDecl = (AbsCompDecl) node.subtree(0).accept(this, null); allCompDecls.add(compDecl); AbsCompDecls compDecls = (AbsCompDecls) node.subtree(1).accept(this, null); if (compDecls != null) { allCompDecls.addAll(compDecls.compDecls()); } Location loc = new Location(compDecl, compDecls == null ? 
compDecl : compDecls); return new AbsCompDecls(loc, allCompDecls); } case CompDecl: { return DeformCompDecl(node); } case Expr: case DisjExpr: case ConjExpr: case RelExpr: case AddExpr: case MulExpr: { return ExpressionTransform(node, visArg); } case DisjExprRest: case ConjExprRest: case AddExprRest: case MulExprRest: { if (node.numSubtrees() == 0) { return visArg; } DerLeaf operatorNode = (DerLeaf) node.subtree(0); AbsBinExpr.Oper oper = kTermToBinOper.get(operatorNode.symb.token); if (oper == null) { throw new Report.Error(node.location(), WRONG_BINARY_NODE + GOT_STR + node.numSubtrees()); } AbsExpr leftOperand = (AbsExpr) node.subtree(1).accept(this, null); Location loc = new Location(visArg, leftOperand); AbsBinExpr binExpr = new AbsBinExpr(loc, oper, (AbsExpr) visArg, leftOperand); AbsExpr rightOperand = (AbsExpr) node.subtree(2).accept(this, binExpr); // rightOperand.location()); return rightOperand; } case RelExprRest: { if (node.numSubtrees() == 0) { return visArg; } DerLeaf operatorNode = (DerLeaf) node.subtree(0); AbsBinExpr.Oper oper = kTermToBinOper.get(operatorNode.symb.token); if (oper == null) { throw new Report.Error(node.location(), WRONG_BINARY_NODE + GOT_STR + node.numSubtrees()); } AbsExpr leftOperand = (AbsExpr) node.subtree(1).accept(this, null); Location loc = new Location(visArg, leftOperand); return new AbsBinExpr(loc, oper, (AbsExpr) visArg, leftOperand); } case PrefExpr: { if (node.subtree(0) instanceof DerLeaf) { DerLeaf operatorNode = (DerLeaf) node.subtree(0); AbsUnExpr.Oper oper = kTermToUnarOper.get(operatorNode.symb.token); if (oper != null) { AbsExpr subExpr = (AbsExpr) node.subtree(1).accept(this, null); Location loc = new Location(operatorNode, subExpr); return new AbsUnExpr(loc, oper, subExpr); } if (operatorNode.symb.token == Term.NEW) { AbsType type = (AbsType) node.subtree(1).accept(this, null); Location loc = new Location(operatorNode, type); return new AbsNewExpr(loc, type); } if (operatorNode.symb.token == Term.DEL) { AbsExpr expr = (AbsExpr) node.subtree(1).accept(this, null); Location loc = new Location(operatorNode, expr); return new AbsDelExpr(loc, expr); } } else { DerNode exprNode = (DerNode) node.subtree(0); AbsExpr expr = (AbsExpr) exprNode.accept(this, null); if (exprNode.label == Nont.Expr) { return node.subtree(1).accept(this, expr); } else if (exprNode.label == Nont.PstfExpr) { return node.subtree(1).accept(this, expr); } } } case PstfExprRest: { if (node.numSubtrees() == 0) { return visArg; } if (node.subtree(0) instanceof DerLeaf) { DerLeaf varNode = (DerLeaf) node.subtree(0); AbsVarName varName = new AbsVarName(varNode, varNode.symb.lexeme); Location loc = new Location(visArg, varName); AbsRecExpr recordExpr = new AbsRecExpr(loc, (AbsExpr) visArg, varName); return node.subtree(1).accept(this, recordExpr); } else { AbsExpr index = (AbsExpr) node.subtree(0).accept(this, null); Location loc = new Location(visArg, index); AbsArrExpr arrExpr = new AbsArrExpr(loc, (AbsExpr) visArg, index); return node.subtree(1).accept(this, arrExpr); } } case PstfExpr: { return node.subtree(0).accept(this, null); } case AtomExpr: { if (node.numSubtrees() == 1) { return node.subtree(0).accept(this, null); } if (node.numSubtrees() == 2) { boolean isNull = node.subtree(1).accept(this, null) instanceof AbsAtomExpr && node.subtree(1).accept(this, null).location().equals(kNULL_LOCATION); if (isNull) { DerLeaf varName = (DerLeaf) node.subtree(0); Location loc = new Location(varName, varName); return new AbsVarName(loc, varName.symb.lexeme); } else { AbsArgs 
funArgs = (AbsArgs) node.subtree(1).accept(this, null); DerLeaf funName = (DerLeaf) node.subtree(0); Location loc = new Location(funName, funArgs); return new AbsFunName(loc, funName.symb.lexeme, funArgs); } } if (node.numSubtrees() == 3) { AbsStmts statements = (AbsStmts) node.subtree(0).accept(this, null); AbsExpr expr = (AbsExpr) node.subtree(1).accept(this, null); AbsDecls decls = (AbsDecls) node.subtree(2).accept(this, null); if (decls == null) { Location loc = new Location(statements, expr); return new AbsBlockExpr(loc, EpsilonDecls(), statements, expr); } Location loc = new Location(statements, decls); return new AbsBlockExpr(loc, decls, statements, expr); } } case Literal: { DerLeaf literal = (DerLeaf) node.subtree(0); return new AbsAtomExpr(literal, kTermToLitType.get(literal.symb.token), literal.symb.lexeme); } case CastEps: { if (node.numSubtrees() == 0) { return visArg; } AbsType type = (AbsType) node.subtree(0).accept(this, null); // Double check this for location Location loc = new Location(visArg, type); return new AbsCastExpr(loc, (AbsExpr) visArg, type); } case CallEps: if (node.numSubtrees() == 0) { return Epsilon(); } return node.subtree(0).accept(this, null); case Args: { if (node.numSubtrees() == 0) { return EpsilonArgs(); } return node.subtree(0).accept(this, null); } case ArgsEps: case ArgsRest: { if (node.numSubtrees() == 0) { return null; } Vector<AbsExpr> allExpr = new Vector<AbsExpr>(); AbsExpr expr = (AbsExpr) node.subtree(0).accept(this, null); allExpr.add(expr); AbsArgs args = (AbsArgs) node.subtree(1).accept(this, null); if (args != null) { allExpr.addAll(args.args()); } Location loc = new Location(expr, args == null ? expr : args); return new AbsArgs(loc, allExpr); } case Stmts: case StmtsRest: { if (node.numSubtrees() == 0) { return null; } Vector<AbsStmt> allStmts = new Vector<AbsStmt>(); AbsStmt stmt = (AbsStmt) node.subtree(0).accept(this, null); allStmts.add(stmt); AbsStmts stmts = (AbsStmts) node.subtree(1).accept(this, null); if (stmts != null) { allStmts.addAll(stmts.stmts()); } Location loc = new Location(stmt, stmts == null ? 
stmt : stmts); return new AbsStmts(loc, allStmts); } case Stmt: { if (node.numSubtrees() == 2) { AbsExpr expr = (AbsExpr) node.subtree(0).accept(this, null); return node.subtree(1).accept(this, expr); } if (node.numSubtrees() == 3) { AbsExpr condition = (AbsExpr) node.subtree(1).accept(this, null); AbsStmts doStatements = (AbsStmts) node.subtree(2).accept(this, null); Location loc = new Location((DerLeaf) node.subtree(0), doStatements); return new AbsWhileStmt(loc, condition, doStatements); } if (node.numSubtrees() == 4) { AbsExpr condition = (AbsExpr) node.subtree(1).accept(this, null); AbsStmts thenStatements = (AbsStmts) node.subtree(2).accept(this, null); AbsStmts elseStatements = (AbsStmts) node.subtree(3).accept(this, null); if (elseStatements == null) { Location loc = new Location((DerLeaf) node.subtree(0), thenStatements); return new AbsIfStmt(loc, condition, thenStatements, EpsilonStmts()); } Location loc = new Location((DerLeaf) node.subtree(0), elseStatements); return new AbsIfStmt(loc, condition, thenStatements, elseStatements); } } case AssignEps: { if (node.numSubtrees() == 0) { return new AbsExprStmt(visArg, (AbsExpr) visArg); } AbsExpr rightSide = (AbsExpr) node.subtree(0).accept(this, null); Location loc = new Location(visArg, rightSide); return new AbsAssignStmt(loc, (AbsExpr) visArg, rightSide); } case ElseEps: case WhereEps: { if (node.numSubtrees() == 0) { return null; } return node.subtree(0).accept(this, null); } } // End Switch return visArg; } // Helper function private AbsTree DeformParDecls(DerNode node) { if (node.numSubtrees() == 0) { return new AbsParDecls(kNULL_LOCATION, new Vector<AbsParDecl>()); } Vector<AbsParDecl> allParDecls = new Vector<AbsParDecl>(); AbsParDecl parDecl = (AbsParDecl) node.subtree(0).accept(this, null); allParDecls.add(parDecl); AbsParDecls parDecls = (AbsParDecls) node.subtree(1).accept(this, null); if (parDecls != null) { allParDecls.addAll(parDecls.parDecls()); } Location loc = new Location(parDecl, parDecls.equals(kNULL_LOCATION) ? 
parDecl : parDecls); return new AbsParDecls(loc, allParDecls); } private AbsTree DeformParDecl(DerNode node) { DerLeaf identifier = (DerLeaf) node.subtree(0); AbsType type = (AbsType) node.subtree(1).accept(this, null); Location loc = new Location(identifier, type); return new AbsParDecl(loc, identifier.symb.lexeme, type); } // Same function just other class private AbsTree DeformCompDecl(DerNode node) { DerLeaf identifier = (DerLeaf) node.subtree(0); AbsType type = (AbsType) node.subtree(1).accept(this, null); Location loc = new Location(identifier, type); return new AbsCompDecl(loc, identifier.symb.lexeme, type); } private AbsTree ExpressionTransform(DerNode node, AbsTree visArg) { if (node.numSubtrees() == 0) { return visArg; } if (node.numSubtrees() == 1) { return node.subtree(0).accept(this, null); } AbsExpr leftOperand = (AbsExpr) node.subtree(0).accept(this, null); AbsExpr rightOperand = (AbsExpr) node.subtree(1).accept(this, leftOperand); return rightOperand; } private AbsTree Epsilon() { // Hacky try to find other expr that is not abstrac and has less field return new AbsAtomExpr(kNULL_LOCATION, AbsAtomExpr.Type.VOID, ""); } private AbsArgs EpsilonArgs() { return new AbsArgs(kNULL_LOCATION, new Vector<AbsExpr>()); } private AbsDecls EpsilonDecls() { return new AbsDecls(kNULL_LOCATION, new Vector<AbsDecl>()); } private AbsStmts EpsilonStmts() { return new AbsStmts(kNULL_LOCATION, new Vector<AbsStmt>()); } private Map<Term, AbsBinExpr.Oper> kTermToBinOper = new HashMap<Term, AbsBinExpr.Oper>() { { put(Term.IOR, AbsBinExpr.Oper.IOR); put(Term.XOR, AbsBinExpr.Oper.XOR); put(Term.AND, AbsBinExpr.Oper.AND); put(Term.EQU, AbsBinExpr.Oper.EQU); put(Term.NEQ, AbsBinExpr.Oper.NEQ); put(Term.LTH, AbsBinExpr.Oper.LTH); put(Term.GTH, AbsBinExpr.Oper.GTH); put(Term.LEQ, AbsBinExpr.Oper.LEQ); put(Term.GEQ, AbsBinExpr.Oper.GEQ); put(Term.ADD, AbsBinExpr.Oper.ADD); put(Term.SUB, AbsBinExpr.Oper.SUB); put(Term.MUL, AbsBinExpr.Oper.MUL); put(Term.DIV, AbsBinExpr.Oper.DIV); put(Term.MOD, AbsBinExpr.Oper.MOD); } }; private Map<Term, AbsUnExpr.Oper> kTermToUnarOper = new HashMap<Term, AbsUnExpr.Oper>() { { put(Term.NOT, AbsUnExpr.Oper.NOT); put(Term.ADDR, AbsUnExpr.Oper.ADDR); put(Term.DATA, AbsUnExpr.Oper.DATA); put(Term.ADD, AbsUnExpr.Oper.ADD); put(Term.SUB, AbsUnExpr.Oper.SUB); } }; private Map<Term, AbsAtomExpr.Type> kTermToLitType = new HashMap<Term, AbsAtomExpr.Type>() { { put(Term.VOIDCONST, AbsAtomExpr.Type.VOID); put(Term.BOOLCONST, AbsAtomExpr.Type.BOOL); put(Term.PTRCONST, AbsAtomExpr.Type.PTR); put(Term.INTCONST, AbsAtomExpr.Type.INT); put(Term.CHARCONST, AbsAtomExpr.Type.CHAR); put(Term.STRCONST, AbsAtomExpr.Type.STR); } }; }
PREV - Abstr : Right position of function arguments
PREV/prev/srcs/compiler/phases/abstr/AbsTreeConstructor.java
PREV - Abstr : Right position of function arguments
<ide><path>PREV/prev/srcs/compiler/phases/abstr/AbsTreeConstructor.java
<ide> } else {
<ide> AbsArgs funArgs = (AbsArgs) node.subtree(1).accept(this, null);
<ide> DerLeaf funName = (DerLeaf) node.subtree(0);
<del> Location loc = new Location(funName, funArgs);
<add> Location loc = new Location(funName, funArgs.args().size() != 0 ? funArgs : funName);
<ide> return new AbsFunName(loc, funName.symb.lexeme, funArgs);
<ide> }
<ide> }
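The diff above changes how the right boundary of an AbsFunName's Location is chosen: when the call has no arguments, the AbsArgs node carries only the kNULL_LOCATION placeholder, so using it as the end of the span yields a wrong position, and the fix falls back to the function-name leaf in that case. A minimal sketch of the fixed logic, assuming the Location and AbsArgs APIs shown in the surrounding sources:

    // Illustrative sketch only, not part of the dataset record.
    // An empty argument list has the epsilon (0, 0) location, so the name leaf
    // must bound the span on both sides; otherwise the span ends at the arguments.
    Location loc = new Location(funName,
            funArgs.args().size() != 0 ? funArgs : funName);
    return new AbsFunName(loc, funName.symb.lexeme, funArgs);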
Java
agpl-3.0
e80647ee8c6e404f442028ca41e4372c771434e4
0
poum/libreplan,dgray16/libreplan,Marine-22/libre,LibrePlan/libreplan,poum/libreplan,LibrePlan/libreplan,skylow95/libreplan,Marine-22/libre,skylow95/libreplan,dgray16/libreplan,PaulLuchyn/libreplan,LibrePlan/libreplan,LibrePlan/libreplan,LibrePlan/libreplan,dgray16/libreplan,PaulLuchyn/libreplan,PaulLuchyn/libreplan,dgray16/libreplan,PaulLuchyn/libreplan,skylow95/libreplan,Marine-22/libre,poum/libreplan,Marine-22/libre,skylow95/libreplan,poum/libreplan,PaulLuchyn/libreplan,dgray16/libreplan,poum/libreplan,skylow95/libreplan,dgray16/libreplan,Marine-22/libre,skylow95/libreplan,PaulLuchyn/libreplan,LibrePlan/libreplan,LibrePlan/libreplan,PaulLuchyn/libreplan,Marine-22/libre,poum/libreplan,dgray16/libreplan
/* * This file is part of LibrePlan * * Copyright (C) 2009-2010 Fundación para o Fomento da Calidade Industrial e * Desenvolvemento Tecnolóxico de Galicia * Copyright (C) 2010-2012 Igalia, S.L. * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package org.libreplan.web.planner; import static org.libreplan.web.I18nHelper._; import static org.libreplan.web.common.Util.addCurrencySymbol; import static org.zkoss.ganttz.data.constraint.ConstraintOnComparableValues.biggerOrEqualThan; import static org.zkoss.ganttz.data.constraint.ConstraintOnComparableValues.equalTo; import static org.zkoss.ganttz.data.constraint.ConstraintOnComparableValues.lessOrEqualThan; import java.math.BigDecimal; import java.math.RoundingMode; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Date; import java.util.HashSet; import java.util.List; import java.util.Set; import org.apache.commons.lang.StringUtils; import org.apache.commons.lang.Validate; import org.apache.commons.lang.math.Fraction; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.joda.time.Days; import org.joda.time.Duration; import org.joda.time.LocalDate; import org.joda.time.Seconds; import org.libreplan.business.calendars.entities.BaseCalendar; import org.libreplan.business.common.IAdHocTransactionService; import org.libreplan.business.common.IOnTransaction; import org.libreplan.business.common.daos.IConfigurationDAO; import org.libreplan.business.common.entities.ProgressType; import org.libreplan.business.externalcompanies.daos.IExternalCompanyDAO; import org.libreplan.business.labels.entities.Label; import org.libreplan.business.orders.daos.IOrderElementDAO; import org.libreplan.business.orders.entities.Order; import org.libreplan.business.orders.entities.OrderElement; import org.libreplan.business.orders.entities.OrderStatusEnum; import org.libreplan.business.orders.entities.SumChargedEffort; import org.libreplan.business.orders.entities.SumExpenses; import org.libreplan.business.planner.daos.IResourceAllocationDAO; import org.libreplan.business.planner.daos.ITaskElementDAO; import org.libreplan.business.planner.entities.Dependency; import org.libreplan.business.planner.entities.Dependency.Type; import org.libreplan.business.planner.entities.GenericResourceAllocation; import org.libreplan.business.planner.entities.IMoneyCostCalculator; import org.libreplan.business.planner.entities.ITaskPositionConstrained; import org.libreplan.business.planner.entities.MoneyCostCalculator; import org.libreplan.business.planner.entities.PositionConstraintType; import org.libreplan.business.planner.entities.ResourceAllocation; import org.libreplan.business.planner.entities.ResourceAllocation.Direction; import org.libreplan.business.planner.entities.SpecificResourceAllocation; import org.libreplan.business.planner.entities.Task; import 
org.libreplan.business.planner.entities.TaskElement; import org.libreplan.business.planner.entities.TaskElement.IDatesHandler; import org.libreplan.business.planner.entities.TaskGroup; import org.libreplan.business.planner.entities.TaskPositionConstraint; import org.libreplan.business.resources.daos.ICriterionDAO; import org.libreplan.business.resources.daos.IResourcesSearcher; import org.libreplan.business.resources.entities.Criterion; import org.libreplan.business.resources.entities.Resource; import org.libreplan.business.scenarios.entities.Scenario; import org.libreplan.business.workingday.EffortDuration; import org.libreplan.business.workingday.IntraDayDate; import org.libreplan.business.workingday.IntraDayDate.PartialDay; import org.libreplan.web.planner.order.PlanningStateCreator.PlanningState; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.config.BeanDefinition; import org.springframework.context.annotation.Scope; import org.springframework.stereotype.Component; import org.zkoss.ganttz.IDatesMapper; import org.zkoss.ganttz.ProjectStatusEnum; import org.zkoss.ganttz.adapters.DomainDependency; import org.zkoss.ganttz.adapters.IAdapterToTaskFundamentalProperties; import org.zkoss.ganttz.data.DependencyType; import org.zkoss.ganttz.data.GanttDate; import org.zkoss.ganttz.data.GanttDate.Cases; import org.zkoss.ganttz.data.GanttDate.CustomDate; import org.zkoss.ganttz.data.GanttDate.LocalDateBased; import org.zkoss.ganttz.data.ITaskFundamentalProperties; import org.zkoss.ganttz.data.constraint.Constraint; import org.zkoss.ganttz.util.ReentranceGuard; import org.zkoss.ganttz.util.ReentranceGuard.IReentranceCases; /** * @author Óscar González Fernández <[email protected]> * @author Manuel Rego Casasnovas <[email protected]> */ @Component @Scope(BeanDefinition.SCOPE_SINGLETON) public class TaskElementAdapter { private static final Log LOG = LogFactory.getLog(TaskElementAdapter.class); public static List<Constraint<GanttDate>> getStartConstraintsFor( TaskElement taskElement, LocalDate orderInitDate) { if (taskElement instanceof ITaskPositionConstrained) { ITaskPositionConstrained task = (ITaskPositionConstrained) taskElement; TaskPositionConstraint startConstraint = task .getPositionConstraint(); final PositionConstraintType constraintType = startConstraint .getConstraintType(); switch (constraintType) { case AS_SOON_AS_POSSIBLE: if (orderInitDate != null) { return Collections .singletonList(biggerOrEqualThan(toGantt(orderInitDate))); } return Collections.emptyList(); case START_IN_FIXED_DATE: return Collections .singletonList(equalTo(toGantt(startConstraint .getConstraintDate()))); case START_NOT_EARLIER_THAN: return Collections .singletonList(biggerOrEqualThan(toGantt(startConstraint .getConstraintDate()))); } } return Collections.emptyList(); } public static List<Constraint<GanttDate>> getEndConstraintsFor( TaskElement taskElement, LocalDate deadline) { if (taskElement instanceof ITaskPositionConstrained) { ITaskPositionConstrained task = (ITaskPositionConstrained) taskElement; TaskPositionConstraint endConstraint = task.getPositionConstraint(); PositionConstraintType type = endConstraint.getConstraintType(); switch (type) { case AS_LATE_AS_POSSIBLE: if (deadline != null) { return Collections .singletonList(lessOrEqualThan(toGantt(deadline))); } case FINISH_NOT_LATER_THAN: GanttDate date = toGantt(endConstraint.getConstraintDate()); return Collections.singletonList(lessOrEqualThan(date)); } } return Collections.emptyList(); } public 
static GanttDate toGantt(IntraDayDate date) { return toGantt(date, null); } public static GanttDate toGantt(IntraDayDate date, EffortDuration dayCapacity) { if (date == null) { return null; } if (dayCapacity == null) { // a sensible default dayCapacity = EffortDuration.hours(8); } return new GanttDateAdapter(date, dayCapacity); } public static GanttDate toGantt(LocalDate date) { if (date == null) { return null; } return GanttDate.createFrom(date); } public static IntraDayDate toIntraDay(GanttDate date) { if (date == null) { return null; } return date.byCases(new Cases<GanttDateAdapter, IntraDayDate>( GanttDateAdapter.class) { @Override public IntraDayDate on(LocalDateBased localDate) { return IntraDayDate.startOfDay(localDate.getLocalDate()); } @Override protected IntraDayDate onCustom(GanttDateAdapter customType) { return customType.date; } }); } public IAdapterToTaskFundamentalProperties<TaskElement> createForCompany( Scenario currentScenario) { Adapter result = new Adapter(); result.useScenario(currentScenario); result.setPreventCalculateResourcesText(true); return result; } public IAdapterToTaskFundamentalProperties<TaskElement> createForOrder( Scenario currentScenario, Order order, PlanningState planningState) { Adapter result = new Adapter(planningState); result.useScenario(currentScenario); result.setInitDate(asLocalDate(order.getInitDate())); result.setDeadline(asLocalDate(order.getDeadline())); return result; } private LocalDate asLocalDate(Date date) { return date != null ? LocalDate.fromDateFields(date) : null; } @Autowired private IAdHocTransactionService transactionService; private final ReentranceGuard reentranceGuard = new ReentranceGuard(); @Autowired private IOrderElementDAO orderElementDAO; @Autowired private ITaskElementDAO taskDAO; @Autowired private ICriterionDAO criterionDAO; @Autowired private IResourceAllocationDAO resourceAllocationDAO; @Autowired private IExternalCompanyDAO externalCompanyDAO; @Autowired private IResourcesSearcher searcher; @Autowired private IConfigurationDAO configurationDAO; @Autowired private IMoneyCostCalculator moneyCostCalculator; static class GanttDateAdapter extends CustomDate { private static final int DAY_MILLISECONDS = (int) Days.days(1) .toStandardDuration().getMillis(); private final IntraDayDate date; private final Duration workingDayDuration; GanttDateAdapter(IntraDayDate date, EffortDuration capacityForDay) { this.date = date; this.workingDayDuration = toMilliseconds(capacityForDay); } protected int compareToCustom(CustomDate customType) { if (customType instanceof GanttDateAdapter) { GanttDateAdapter other = (GanttDateAdapter) customType; return this.date.compareTo(other.date); } throw new RuntimeException("incompatible type: " + customType); } protected int compareToLocalDate(LocalDate localDate) { return this.date.compareTo(localDate); } public IntraDayDate getDate() { return date; } @Override public Date toDayRoundedDate() { return date.toDateTimeAtStartOfDay().toDate(); } @Override public LocalDate toLocalDate() { return date.getDate(); } @Override public LocalDate asExclusiveEnd() { return date.asExclusiveEnd(); } @Override protected boolean isEqualsToCustom(CustomDate customType) { if (customType instanceof GanttDateAdapter) { GanttDateAdapter other = (GanttDateAdapter) customType; return this.date.equals(other.date); } return false; } @Override public int hashCode() { return date.hashCode(); } @Override public int toPixels(IDatesMapper datesMapper) { int pixesUntilDate = datesMapper.toPixels(this.date.getDate()); 
EffortDuration effortDuration = date.getEffortDuration(); Duration durationInDay = calculateDurationInDayFor(effortDuration); int pixelsInsideDay = datesMapper.toPixels(durationInDay); return pixesUntilDate + pixelsInsideDay; } private Duration calculateDurationInDayFor(EffortDuration effortDuration) { if (workingDayDuration.getStandardSeconds() == 0) { return Duration.ZERO; } Fraction fraction = fractionOfWorkingDayFor(effortDuration); try { return new Duration(fraction.multiplyBy( Fraction.getFraction(DAY_MILLISECONDS, 1)).intValue()); } catch (ArithmeticException e) { // if fraction overflows use floating point arithmetic return new Duration( (int) (fraction.doubleValue() * DAY_MILLISECONDS)); } } @SuppressWarnings("unchecked") private Fraction fractionOfWorkingDayFor(EffortDuration effortDuration) { Duration durationInDay = toMilliseconds(effortDuration); // cast to int is safe because there are not enough seconds in // day // to overflow Fraction fraction = Fraction.getFraction( (int) durationInDay.getStandardSeconds(), (int) workingDayDuration.getStandardSeconds()); return (Fraction) Collections.min(Arrays.asList(fraction, Fraction.ONE)); } private static Duration toMilliseconds(EffortDuration duration) { return Seconds.seconds(duration.getSeconds()).toStandardDuration(); } } /** * Responsible of adaptating a {@link TaskElement} into a * {@link ITaskFundamentalProperties} <br /> * @author Óscar González Fernández <[email protected]> */ public class Adapter implements IAdapterToTaskFundamentalProperties<TaskElement> { private Scenario scenario; private LocalDate initDate; private LocalDate deadline; private boolean preventCalculateResourcesText = false; private final PlanningState planningState; private void useScenario(Scenario scenario) { this.scenario = scenario; } private void setInitDate(LocalDate initDate) { this.initDate = initDate; } private void setDeadline(LocalDate deadline) { this.deadline = deadline; } public boolean isPreventCalculateResourcesText() { return preventCalculateResourcesText; } public void setPreventCalculateResourcesText( boolean preventCalculateResourcesText) { this.preventCalculateResourcesText = preventCalculateResourcesText; } public Adapter() { this(null); } public Adapter(PlanningState planningState) { this.planningState = planningState; } private class TaskElementWrapper implements ITaskFundamentalProperties { private final TaskElement taskElement; private final Scenario currentScenario; protected TaskElementWrapper(Scenario currentScenario, TaskElement taskElement) { Validate.notNull(currentScenario); this.currentScenario = currentScenario; this.taskElement = taskElement; } private final IUpdatablePosition position = new IUpdatablePosition() { @Override public void setEndDate(GanttDate endDate) { stepsBeforePossibleReallocation(); getDatesHandler(taskElement).moveEndTo(toIntraDay(endDate)); } @Override public void setBeginDate(final GanttDate beginDate) { stepsBeforePossibleReallocation(); getDatesHandler(taskElement).moveTo(toIntraDay(beginDate)); } @Override public void resizeTo(final GanttDate endDate) { stepsBeforePossibleReallocation(); updateTaskPositionConstraint(endDate); getDatesHandler(taskElement).resizeTo(toIntraDay(endDate)); } private void stepsBeforePossibleReallocation() { taskDAO.reattach(taskElement); } @Override public void moveTo(GanttDate newStart) { if (taskElement instanceof ITaskPositionConstrained) { ITaskPositionConstrained task = (ITaskPositionConstrained) taskElement; GanttDate newEnd = inferEndFrom(newStart); if 
(task.getPositionConstraint() .isConstraintAppliedToStart()) { setBeginDate(newStart); } else { setEndDate(newEnd); } task.explicityMoved(toIntraDay(newStart), toIntraDay(newEnd)); } } }; @Override public void setName(String name) { taskElement.setName(name); } @Override public void setNotes(String notes) { taskElement.setNotes(notes); } @Override public String getName() { return taskElement.getName(); } @Override public String getCode() { return taskElement.getCode(); } @Override public String getProjectCode() { return taskElement.getProjectCode(); } @Override public String getNotes() { return taskElement.getNotes(); } @Override public GanttDate getBeginDate() { IntraDayDate start = taskElement.getIntraDayStartDate(); return toGantt(start); } private GanttDate toGantt(IntraDayDate date) { BaseCalendar calendar = taskElement.getCalendar(); if (calendar == null) { return TaskElementAdapter.toGantt(date); } return TaskElementAdapter.toGantt(date, calendar .getCapacityOn(PartialDay.wholeDay(date.getDate()))); } @Override public void doPositionModifications( final IModifications modifications) { reentranceGuard.entranceRequested(new IReentranceCases() { @Override public void ifNewEntrance() { transactionService.runOnReadOnlyTransaction(asTransaction(modifications)); } IOnTransaction<Void> asTransaction( final IModifications modifications) { return new IOnTransaction<Void>() { @Override public Void execute() { if (planningState != null) { planningState .reassociateResourcesWithSession(); } modifications.doIt(position); return null; } }; } @Override public void ifAlreadyInside() { modifications.doIt(position); } }); } @Override public GanttDate getEndDate() { return toGantt(taskElement.getIntraDayEndDate()); } IDatesHandler getDatesHandler(TaskElement taskElement) { return taskElement.getDatesHandler(currentScenario, searcher); } private void updateTaskPositionConstraint(GanttDate endDate) { if (taskElement instanceof ITaskPositionConstrained) { ITaskPositionConstrained task = (ITaskPositionConstrained) taskElement; PositionConstraintType constraintType = task .getPositionConstraint().getConstraintType(); if (constraintType .compareTo(PositionConstraintType.FINISH_NOT_LATER_THAN) == 0 || constraintType .compareTo(PositionConstraintType.AS_LATE_AS_POSSIBLE) == 0) { task.explicityMoved(taskElement.getIntraDayStartDate(), toIntraDay(endDate)); } } } @Override public GanttDate getHoursAdvanceBarEndDate() { return calculateLimitDateProportionalToTaskElementSize(getHoursAdvanceBarPercentage()); } @Override public BigDecimal getHoursAdvanceBarPercentage() { OrderElement orderElement = taskElement.getOrderElement(); if (orderElement == null) { return BigDecimal.ZERO; } EffortDuration totalChargedEffort = orderElement .getSumChargedEffort() != null ? 
orderElement .getSumChargedEffort().getTotalChargedEffort() : EffortDuration.zero(); EffortDuration estimatedEffort = taskElement.getSumOfAssignedEffort(); if(estimatedEffort.isZero()) { estimatedEffort = EffortDuration.hours(orderElement.getWorkHours()); if(estimatedEffort.isZero()) { return BigDecimal.ZERO; } } return new BigDecimal(totalChargedEffort.divivedBy( estimatedEffort).doubleValue()).setScale(2, RoundingMode.HALF_UP); } @Override public GanttDate getMoneyCostBarEndDate() { return calculateLimitDateProportionalToTaskElementSize(getMoneyCostBarPercentage()); } private GanttDate calculateLimitDateProportionalToTaskElementSize( BigDecimal proportion) { if (proportion.compareTo(BigDecimal.ZERO) == 0) { return getBeginDate(); } IntraDayDate start = taskElement.getIntraDayStartDate(); IntraDayDate end = taskElement.getIntraDayEndDate(); EffortDuration effortBetween = start.effortUntil(end); int seconds = new BigDecimal(effortBetween.getSeconds()) .multiply(proportion).toBigInteger().intValue(); return TaskElementAdapter.toGantt( start.addEffort(EffortDuration.seconds(seconds)), EffortDuration.hours(8)); } @Override public BigDecimal getMoneyCostBarPercentage() { return MoneyCostCalculator.getMoneyCostProportion( getMoneyCost(), getBudget()); } private BigDecimal getBudget() { if ((taskElement == null) || (taskElement.getOrderElement() == null)) { return BigDecimal.ZERO; } return taskElement.getOrderElement().getBudget(); } private BigDecimal getTotalCalculatedBudget() { if ((taskElement == null) || (taskElement.getOrderElement() == null)) { return BigDecimal.ZERO; } return transactionService .runOnReadOnlyTransaction(new IOnTransaction<BigDecimal>() { @Override public BigDecimal execute() { return taskElement.getOrderElement() .getTotalBudget(); } }); } private BigDecimal getMoneyCost() { if ((taskElement == null) || (taskElement.getOrderElement() == null)) { return BigDecimal.ZERO; } return transactionService .runOnReadOnlyTransaction(new IOnTransaction<BigDecimal>() { @Override public BigDecimal execute() { return moneyCostCalculator.getTotalMoneyCost(taskElement .getOrderElement()); } }); } private BigDecimal getHoursMoneyCost() { if ((taskElement == null) || (taskElement.getOrderElement() == null)) { return BigDecimal.ZERO; } return transactionService .runOnReadOnlyTransaction(new IOnTransaction<BigDecimal>() { @Override public BigDecimal execute() { return moneyCostCalculator.getHoursMoneyCost(taskElement.getOrderElement()); } }); } private BigDecimal getExpensesMoneyCost() { if ((taskElement == null) || (taskElement.getOrderElement() == null)) { return BigDecimal.ZERO; } return transactionService .runOnReadOnlyTransaction(new IOnTransaction<BigDecimal>() { @Override public BigDecimal execute() { return moneyCostCalculator.getExpensesMoneyCost(taskElement .getOrderElement()); } }); } @Override public GanttDate getAdvanceBarEndDate(String progressType) { return getAdvanceBarEndDate(ProgressType.asEnum(progressType)); } private GanttDate getAdvanceBarEndDate(ProgressType progressType) { BigDecimal advancePercentage = BigDecimal.ZERO; if (taskElement.getOrderElement() != null) { advancePercentage = taskElement .getAdvancePercentage(progressType); } return getAdvanceBarEndDate(advancePercentage); } @Override public GanttDate getAdvanceBarEndDate() { return getAdvanceBarEndDate(getAdvancePercentage()); } private boolean isTaskRoot(TaskElement taskElement) { return taskElement instanceof TaskGroup && taskElement.getParent() == null; } private ProgressType 
getProgressTypeFromConfiguration() { return transactionService .runOnReadOnlyTransaction(new IOnTransaction<ProgressType>() { @Override public ProgressType execute() { return configurationDAO.getConfiguration() .getProgressType(); } }); } private GanttDate getAdvanceBarEndDate(BigDecimal advancePercentage) { return calculateLimitDateProportionalToTaskElementSize(advancePercentage); } @Override public String getTooltipText() { if (taskElement.isMilestone() || taskElement.getOrderElement() == null) { return ""; } return transactionService .runOnReadOnlyTransaction(new IOnTransaction<String>() { @Override public String execute() { orderElementDAO.reattach(taskElement .getOrderElement()); return buildTooltipText(); } }); } @Override public String getLabelsText() { if (taskElement.isMilestone() || taskElement.getOrderElement() == null) { return ""; } return transactionService .runOnReadOnlyTransaction(new IOnTransaction<String>() { @Override public String execute() { orderElementDAO.reattach(taskElement .getOrderElement()); return buildLabelsText(); } }); } @Override public String getResourcesText() { if (isPreventCalculateResourcesText() || taskElement.getOrderElement() == null) { return ""; } try { return transactionService .runOnAnotherReadOnlyTransaction(new IOnTransaction<String>() { @Override public String execute() { orderElementDAO.reattach(taskElement .getOrderElement()); if (taskElement.isSubcontracted()) { externalCompanyDAO.reattach(taskElement .getSubcontractedCompany()); } return buildResourcesText(); } }); } catch (Exception e) { LOG.error("error calculating resources text", e); return ""; } } private Set<Label> getLabelsFromElementAndPredecesors( OrderElement order) { if (order != null) { if (order.getParent() == null) { return order.getLabels(); } else { HashSet<Label> labels = new HashSet<Label>( order.getLabels()); labels.addAll(getLabelsFromElementAndPredecesors(order .getParent())); return labels; } } return new HashSet<Label>(); } private String buildLabelsText() { List<String> result = new ArrayList<String>(); if (taskElement.getOrderElement() != null) { Set<Label> labels = getLabelsFromElementAndPredecesors(taskElement .getOrderElement()); for (Label label : labels) { String representation = label.getName(); if (!result.contains(representation)) { result.add(representation); } } } Collections.sort(result); return StringUtils.join(result, ", "); } private String buildResourcesText() { List<String> result = new ArrayList<String>(); for (ResourceAllocation<?> each : taskElement .getSatisfiedResourceAllocations()) { if (each instanceof SpecificResourceAllocation) { for (Resource r : each.getAssociatedResources()) { String representation = r.getName(); if (!result.contains(representation)) { result.add(representation); } } } else { String representation = extractRepresentationForGeneric((GenericResourceAllocation) each); if (!result.contains(representation)) { result.add(representation); } } } if (taskElement.isSubcontracted()) { result.add(taskElement.getSubcontractionName()); } Collections.sort(result); return StringUtils.join(result, "; "); } private String extractRepresentationForGeneric( GenericResourceAllocation generic) { if (!generic.isNewObject()) { resourceAllocationDAO.reattach(generic); } Set<Criterion> criterions = generic.getCriterions(); List<String> forCriterionRepresentations = new ArrayList<String>(); if (!criterions.isEmpty()) { for (Criterion c : criterions) { criterionDAO.reattachUnmodifiedEntity(c); forCriterionRepresentations.add(c.getName()); } } else 
{ forCriterionRepresentations.add((_("All workers"))); } return "[" + StringUtils.join(forCriterionRepresentations, ", ") + "]"; } @Override public String updateTooltipText() { return buildTooltipText(); } @Override public String updateTooltipText(String progressType) { return buildTooltipText(ProgressType.asEnum(progressType)); } @Override public BigDecimal getAdvancePercentage() { if (taskElement != null) { BigDecimal advancePercentage; if (isTaskRoot(taskElement)) { ProgressType progressType = getProgressTypeFromConfiguration(); advancePercentage = taskElement .getAdvancePercentage(progressType); } else { advancePercentage = taskElement.getAdvancePercentage(); } return advancePercentage; } return new BigDecimal(0); } private String buildTooltipText() { return buildTooltipText(asPercentage(getAdvancePercentage())); } private BigDecimal asPercentage(BigDecimal value) { return value.multiply(BigDecimal.valueOf(100)).setScale(2, RoundingMode.DOWN); } private String buildTooltipText(BigDecimal progressPercentage) { StringBuilder result = new StringBuilder(); result.append("<strong>" + getName() + "</strong><br/>"); result.append(_("Progress") + ": ").append(progressPercentage) .append("% , "); result.append(_("Hours invested") + ": ") .append(getHoursAdvanceBarPercentage().multiply( new BigDecimal(100))).append("% <br/>"); if (taskElement.getOrderElement() instanceof Order) { result.append(_("State") + ": ").append(getOrderState()); } else { String budget = addCurrencySymbol(getTotalCalculatedBudget()); String moneyCost = addCurrencySymbol(getMoneyCost()); String costHours = addCurrencySymbol(getHoursMoneyCost()); String costExpenses = addCurrencySymbol(getExpensesMoneyCost()); result.append( _("Budget: {0}, Consumed: {1} ({2}%)", budget, moneyCost, getMoneyCostBarPercentage().multiply(new BigDecimal(100)))) .append("<br/>"); result.append( _( "Hours cost: {0}, Expenses cost: {1}", costHours, costExpenses)); } String labels = buildLabelsText(); if (!labels.equals("")) { result.append("<div class='tooltip-labels'>" + _("Labels") + ": " + labels + "</div>"); } return result.toString(); } private String buildTooltipText(ProgressType progressType) { return buildTooltipText(asPercentage(taskElement .getAdvancePercentage(progressType))); } private String getOrderState() { String cssClass; OrderStatusEnum state = taskElement.getOrderElement() .getOrder().getState(); if (Arrays.asList(OrderStatusEnum.ACCEPTED, OrderStatusEnum.OFFERED, OrderStatusEnum.STARTED, OrderStatusEnum.OUTSOURCED).contains( state)) { if (taskElement.getAssignedStatus() == "assigned") { cssClass = "order-open-assigned"; } else { cssClass = "order-open-unassigned"; } } else { cssClass = "order-closed"; } return "<font class='" + cssClass + "'>" + _(state.toString()) + "</font>"; } @Override public List<Constraint<GanttDate>> getStartConstraints() { return getStartConstraintsFor(this.taskElement, initDate); } @Override public List<Constraint<GanttDate>> getEndConstraints() { return getEndConstraintsFor(this.taskElement, deadline); } @Override public List<Constraint<GanttDate>> getCurrentLengthConstraint() { if (taskElement instanceof Task) { Task task = (Task) taskElement; if (task.getAllocationDirection() == Direction.FORWARD) { return Collections .singletonList(biggerOrEqualThan(getEndDate())); } } return Collections.emptyList(); } private GanttDate inferEndFrom(GanttDate newStart) { if (taskElement instanceof Task) { Task task = (Task) taskElement; return toGantt(task .calculateEndKeepingLength(toIntraDay(newStart))); } return 
newStart; } @Override public Date getDeadline() { LocalDate deadline = taskElement.getDeadline(); if (deadline == null) { return null; } return deadline.toDateTimeAtStartOfDay().toDate(); } @Override public void setDeadline(Date date) { if (date != null) { taskElement.setDeadline(LocalDate.fromDateFields(date)); } else { taskElement.setDeadline(null); } } @Override public GanttDate getConsolidatedline() { if (!taskElement.isLeaf() || !taskElement.hasConsolidations()) { return null; } LocalDate consolidatedline = ((Task) taskElement) .getFirstDayNotConsolidated().getDate(); return TaskElementAdapter.toGantt(consolidatedline); } @Override public boolean isSubcontracted() { return taskElement.isSubcontracted(); } @Override public boolean isLimiting() { return taskElement.isLimiting(); } @Override public boolean isLimitingAndHasDayAssignments() { return taskElement.isLimitingAndHasDayAssignments(); } public boolean hasConsolidations() { return taskElement.hasConsolidations(); } @Override public boolean canBeExplicitlyResized() { return taskElement.canBeExplicitlyResized(); } @Override public String getAssignedStatus() { return taskElement.getAssignedStatus(); } @Override public boolean isFixed() { return taskElement.isLimitingAndHasDayAssignments() || taskElement.hasConsolidations() || taskElement.isUpdatedFromTimesheets(); } @Override public boolean isManualAnyAllocation() { return taskElement.isTask() && ((Task) taskElement).isManualAnyAllocation(); } @Override public boolean belongsClosedProject() { return taskElement.belongsClosedProject(); } @Override public boolean isRoot() { return taskElement.isRoot(); } @Override public boolean isUpdatedFromTimesheets() { return taskElement.isUpdatedFromTimesheets(); } @Override public Date getFirstTimesheetDate() { OrderElement orderElement = taskElement.getOrderElement(); if (orderElement != null) { return orderElement.getFirstTimesheetDate(); } return null; } @Override public Date getLastTimesheetDate() { OrderElement orderElement = taskElement.getOrderElement(); if (orderElement != null) { return orderElement.getLastTimesheetDate(); } return null; } @Override public ProjectStatusEnum getProjectHoursStatus() { if (taskElement.isTask()) { return getProjectHourStatus(taskElement.getOrderElement()); } List<TaskElement> taskElements = taskElement.getAllChildren(); ProjectStatusEnum status = ProjectStatusEnum.AS_PLANNED; ProjectStatusEnum highestStatus = null; for (TaskElement taskElement : taskElements) { if (!taskElement.isTask()) { continue; } status = getProjectHourStatus(taskElement.getOrderElement()); if (status == ProjectStatusEnum.MARGIN_EXCEEDED) { highestStatus = ProjectStatusEnum.MARGIN_EXCEEDED; break; } if (status == ProjectStatusEnum.WITHIN_MARGIN) { highestStatus = ProjectStatusEnum.WITHIN_MARGIN; } } if (highestStatus != null) { status = highestStatus; } return status; } /** * Returns {@link ProjectStatusEnum} for the specified * <code>orderElement</code> * * @param orderElement */ private ProjectStatusEnum getProjectHourStatus(OrderElement orderElement) { EffortDuration sumChargedEffort = getSumChargedEffort(orderElement); EffortDuration estimatedEffort = getEstimatedEffort(orderElement); if (sumChargedEffort.isZero() || sumChargedEffort.compareTo(estimatedEffort) <= 0) { return ProjectStatusEnum.AS_PLANNED; } EffortDuration withMarginEstimatedHours = orderElement .getWithMarginCalculatedHours(); if (estimatedEffort.compareTo(sumChargedEffort) < 0 && sumChargedEffort.compareTo(withMarginEstimatedHours) <= 0) { return 
ProjectStatusEnum.WITHIN_MARGIN; } return ProjectStatusEnum.MARGIN_EXCEEDED; } /** * Returns sum charged effort for the specified * <code>orderElement</code> * * @param orderElement */ private EffortDuration getSumChargedEffort(OrderElement orderElement) { SumChargedEffort sumChargedEffort = orderElement .getSumChargedEffort(); EffortDuration totalChargedEffort = sumChargedEffort != null ? sumChargedEffort .getTotalChargedEffort() : EffortDuration.zero(); return totalChargedEffort; } /** * Returns the estimated effort for the specified * <code>orderElement</code> * * @param orderElement */ private EffortDuration getEstimatedEffort(OrderElement orderElement) { return EffortDuration.fromHoursAsBigDecimal(new BigDecimal( orderElement.getWorkHours()).setScale(2)); } @Override public ProjectStatusEnum getProjectBudgetStatus() { if (taskElement.isTask()) { return getProjectBudgetStatus(taskElement.getOrderElement()); } List<TaskElement> taskElements = taskElement.getAllChildren(); ProjectStatusEnum status = ProjectStatusEnum.AS_PLANNED; ProjectStatusEnum highestStatus = null; for (TaskElement taskElement : taskElements) { if (!taskElement.isTask()) { continue; } status = getProjectBudgetStatus(taskElement .getOrderElement()); if (status == ProjectStatusEnum.MARGIN_EXCEEDED) { highestStatus = ProjectStatusEnum.MARGIN_EXCEEDED; break; } if (status == ProjectStatusEnum.WITHIN_MARGIN) { highestStatus = ProjectStatusEnum.WITHIN_MARGIN; } } if (highestStatus != null) { status = highestStatus; } return status; } /** * Returns {@link ProjectStatusEnum} for the specified * <code>orderElement</code> * * @param orderElement */ private ProjectStatusEnum getProjectBudgetStatus( OrderElement orderElement) { BigDecimal budget = orderElement.getBudget(); BigDecimal totalExpense = getTotalExpense(orderElement); BigDecimal withMarginCalculatedBudget = orderElement .getWithMarginCalculatedBudget(); if (totalExpense.compareTo(budget) <= 0) { return ProjectStatusEnum.AS_PLANNED; } if (budget.compareTo(totalExpense) < 0 && totalExpense.compareTo(withMarginCalculatedBudget) <= 0) { return ProjectStatusEnum.WITHIN_MARGIN; } return ProjectStatusEnum.MARGIN_EXCEEDED; } /** * Returns total expense for the specified <code>orderElement</code> * * @param orderElement */ public BigDecimal getTotalExpense(OrderElement orderElement) { BigDecimal total = BigDecimal.ZERO; SumExpenses sumExpenses = orderElement.getSumExpenses(); if (sumExpenses != null) { BigDecimal directExpenes = sumExpenses .getTotalDirectExpenses(); BigDecimal indirectExpense = sumExpenses .getTotalIndirectExpenses(); if (directExpenes != null) { total = total.add(directExpenes); } if (indirectExpense != null) { total = total.add(indirectExpense); } } return total; } @Override public String getTooltipTextForProjectHoursStatus() { if (taskElement.isTask()) { return buildHoursTooltipText(taskElement.getOrderElement()); } return null; } @Override public String getTooltipTextForProjectBudgetStatus() { if (taskElement.isTask()) { return buildBudgetTooltipText(taskElement.getOrderElement()); } return null; } /** * Builds hours tooltiptext for the specified * <code>orderElement</code> * * @param orderElement */ private String buildHoursTooltipText(OrderElement orderElement) { StringBuilder result = new StringBuilder(); Integer margin = orderElement.getOrder().getHoursMargin() != null ? 
orderElement .getOrder().getHoursMargin() : 0; result.append(_("Hours-status") + "\n"); result.append(_("Project margin: {0}% ({1} hours)={2} hours", margin, orderElement.getWorkHours(), orderElement.getWithMarginCalculatedHours())); String totalEffortHours = orderElement.getEffortAsString(); result.append(_(". Already registered: {0} hours", totalEffortHours)); return result.toString(); } private String buildBudgetTooltipText(OrderElement orderElement) { StringBuilder result = new StringBuilder(); Integer margin = orderElement.getOrder().getBudgetMargin() != null ? orderElement .getOrder().getBudgetMargin() : 0; result.append(_("Budget-status") + "\n"); result.append(_("Project margin: {0}% ({1})={2}", margin, addCurrencySymbol(orderElement.getBudget()), addCurrencySymbol(orderElement .getWithMarginCalculatedBudget()))); BigDecimal totalExpense = getTotalExpense(orderElement); result.append(_(". Already spent: {0}", addCurrencySymbol(totalExpense))); return result.toString(); } } @Override public ITaskFundamentalProperties adapt(final TaskElement taskElement) { return new TaskElementWrapper(scenario, taskElement); } @Override public List<DomainDependency<TaskElement>> getIncomingDependencies( TaskElement taskElement) { return toDomainDependencies(taskElement .getDependenciesWithThisDestination()); } @Override public List<DomainDependency<TaskElement>> getOutcomingDependencies( TaskElement taskElement) { return toDomainDependencies(taskElement .getDependenciesWithThisOrigin()); } private List<DomainDependency<TaskElement>> toDomainDependencies( Collection<? extends Dependency> dependencies) { List<DomainDependency<TaskElement>> result = new ArrayList<DomainDependency<TaskElement>>(); for (Dependency dependency : dependencies) { result.add(DomainDependency.createDependency( dependency.getOrigin(), dependency.getDestination(), toGanntType(dependency.getType()))); } return result; } private DependencyType toGanntType(Type type) { switch (type) { case END_START: return DependencyType.END_START; case START_END: return DependencyType.START_END; case START_START: return DependencyType.START_START; case END_END: return DependencyType.END_END; default: throw new RuntimeException(_("{0} not supported yet", type)); } } private Type toDomainType(DependencyType type) { switch (type) { case END_START: return Type.END_START; case START_END: return Type.START_END; case START_START: return Type.START_START; case END_END: return Type.END_END; default: throw new RuntimeException(_("{0} not supported yet", type)); } } @Override public void addDependency(DomainDependency<TaskElement> dependency) { TaskElement source = dependency.getSource(); TaskElement destination = dependency.getDestination(); Type domainType = toDomainType(dependency.getType()); Dependency.create(source, destination, domainType); } @Override public boolean canAddDependency(DomainDependency<TaskElement> dependency) { return true; } @Override public void removeDependency(DomainDependency<TaskElement> dependency) { TaskElement source = dependency.getSource(); Type type = toDomainType(dependency.getType()); source.removeDependencyWithDestination(dependency.getDestination(), type); } @Override public void doRemovalOf(TaskElement taskElement) { taskElement.detach(); TaskGroup parent = taskElement.getParent(); if (parent != null) { parent.remove(taskElement); } } } }
libreplan-webapp/src/main/java/org/libreplan/web/planner/TaskElementAdapter.java
/* * This file is part of LibrePlan * * Copyright (C) 2009-2010 Fundación para o Fomento da Calidade Industrial e * Desenvolvemento Tecnolóxico de Galicia * Copyright (C) 2010-2012 Igalia, S.L. * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package org.libreplan.web.planner; import static org.libreplan.web.I18nHelper._; import static org.libreplan.web.common.Util.addCurrencySymbol; import static org.zkoss.ganttz.data.constraint.ConstraintOnComparableValues.biggerOrEqualThan; import static org.zkoss.ganttz.data.constraint.ConstraintOnComparableValues.equalTo; import static org.zkoss.ganttz.data.constraint.ConstraintOnComparableValues.lessOrEqualThan; import java.math.BigDecimal; import java.math.RoundingMode; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Date; import java.util.HashSet; import java.util.List; import java.util.Set; import org.apache.commons.lang.StringUtils; import org.apache.commons.lang.Validate; import org.apache.commons.lang.math.Fraction; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.joda.time.Days; import org.joda.time.Duration; import org.joda.time.LocalDate; import org.joda.time.Seconds; import org.libreplan.business.calendars.entities.BaseCalendar; import org.libreplan.business.common.IAdHocTransactionService; import org.libreplan.business.common.IOnTransaction; import org.libreplan.business.common.daos.IConfigurationDAO; import org.libreplan.business.common.entities.ProgressType; import org.libreplan.business.externalcompanies.daos.IExternalCompanyDAO; import org.libreplan.business.labels.entities.Label; import org.libreplan.business.orders.daos.IOrderElementDAO; import org.libreplan.business.orders.entities.Order; import org.libreplan.business.orders.entities.OrderElement; import org.libreplan.business.orders.entities.OrderStatusEnum; import org.libreplan.business.orders.entities.SumChargedEffort; import org.libreplan.business.orders.entities.SumExpenses; import org.libreplan.business.planner.daos.IResourceAllocationDAO; import org.libreplan.business.planner.daos.ITaskElementDAO; import org.libreplan.business.planner.entities.Dependency; import org.libreplan.business.planner.entities.Dependency.Type; import org.libreplan.business.planner.entities.GenericResourceAllocation; import org.libreplan.business.planner.entities.IMoneyCostCalculator; import org.libreplan.business.planner.entities.ITaskPositionConstrained; import org.libreplan.business.planner.entities.MoneyCostCalculator; import org.libreplan.business.planner.entities.PositionConstraintType; import org.libreplan.business.planner.entities.ResourceAllocation; import org.libreplan.business.planner.entities.ResourceAllocation.Direction; import org.libreplan.business.planner.entities.SpecificResourceAllocation; import org.libreplan.business.planner.entities.Task; import 
org.libreplan.business.planner.entities.TaskElement; import org.libreplan.business.planner.entities.TaskElement.IDatesHandler; import org.libreplan.business.planner.entities.TaskGroup; import org.libreplan.business.planner.entities.TaskPositionConstraint; import org.libreplan.business.resources.daos.ICriterionDAO; import org.libreplan.business.resources.daos.IResourcesSearcher; import org.libreplan.business.resources.entities.Criterion; import org.libreplan.business.resources.entities.Resource; import org.libreplan.business.scenarios.entities.Scenario; import org.libreplan.business.workingday.EffortDuration; import org.libreplan.business.workingday.IntraDayDate; import org.libreplan.business.workingday.IntraDayDate.PartialDay; import org.libreplan.web.planner.order.PlanningStateCreator.PlanningState; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.config.BeanDefinition; import org.springframework.context.annotation.Scope; import org.springframework.stereotype.Component; import org.zkoss.ganttz.IDatesMapper; import org.zkoss.ganttz.ProjectStatusEnum; import org.zkoss.ganttz.adapters.DomainDependency; import org.zkoss.ganttz.adapters.IAdapterToTaskFundamentalProperties; import org.zkoss.ganttz.data.DependencyType; import org.zkoss.ganttz.data.GanttDate; import org.zkoss.ganttz.data.GanttDate.Cases; import org.zkoss.ganttz.data.GanttDate.CustomDate; import org.zkoss.ganttz.data.GanttDate.LocalDateBased; import org.zkoss.ganttz.data.ITaskFundamentalProperties; import org.zkoss.ganttz.data.constraint.Constraint; import org.zkoss.ganttz.util.ReentranceGuard; import org.zkoss.ganttz.util.ReentranceGuard.IReentranceCases; /** * @author Óscar González Fernández <[email protected]> * @author Manuel Rego Casasnovas <[email protected]> */ @Component @Scope(BeanDefinition.SCOPE_SINGLETON) public class TaskElementAdapter { private static final Log LOG = LogFactory.getLog(TaskElementAdapter.class); public static List<Constraint<GanttDate>> getStartConstraintsFor( TaskElement taskElement, LocalDate orderInitDate) { if (taskElement instanceof ITaskPositionConstrained) { ITaskPositionConstrained task = (ITaskPositionConstrained) taskElement; TaskPositionConstraint startConstraint = task .getPositionConstraint(); final PositionConstraintType constraintType = startConstraint .getConstraintType(); switch (constraintType) { case AS_SOON_AS_POSSIBLE: if (orderInitDate != null) { return Collections .singletonList(biggerOrEqualThan(toGantt(orderInitDate))); } return Collections.emptyList(); case START_IN_FIXED_DATE: return Collections .singletonList(equalTo(toGantt(startConstraint .getConstraintDate()))); case START_NOT_EARLIER_THAN: return Collections .singletonList(biggerOrEqualThan(toGantt(startConstraint .getConstraintDate()))); } } return Collections.emptyList(); } public static List<Constraint<GanttDate>> getEndConstraintsFor( TaskElement taskElement, LocalDate deadline) { if (taskElement instanceof ITaskPositionConstrained) { ITaskPositionConstrained task = (ITaskPositionConstrained) taskElement; TaskPositionConstraint endConstraint = task.getPositionConstraint(); PositionConstraintType type = endConstraint.getConstraintType(); switch (type) { case AS_LATE_AS_POSSIBLE: if (deadline != null) { return Collections .singletonList(lessOrEqualThan(toGantt(deadline))); } case FINISH_NOT_LATER_THAN: GanttDate date = toGantt(endConstraint.getConstraintDate()); return Collections.singletonList(lessOrEqualThan(date)); } } return Collections.emptyList(); } public 
static GanttDate toGantt(IntraDayDate date) { return toGantt(date, null); } public static GanttDate toGantt(IntraDayDate date, EffortDuration dayCapacity) { if (date == null) { return null; } if (dayCapacity == null) { // a sensible default dayCapacity = EffortDuration.hours(8); } return new GanttDateAdapter(date, dayCapacity); } public static GanttDate toGantt(LocalDate date) { if (date == null) { return null; } return GanttDate.createFrom(date); } public static IntraDayDate toIntraDay(GanttDate date) { if (date == null) { return null; } return date.byCases(new Cases<GanttDateAdapter, IntraDayDate>( GanttDateAdapter.class) { @Override public IntraDayDate on(LocalDateBased localDate) { return IntraDayDate.startOfDay(localDate.getLocalDate()); } @Override protected IntraDayDate onCustom(GanttDateAdapter customType) { return customType.date; } }); } public IAdapterToTaskFundamentalProperties<TaskElement> createForCompany( Scenario currentScenario) { Adapter result = new Adapter(); result.useScenario(currentScenario); result.setPreventCalculateResourcesText(true); return result; } public IAdapterToTaskFundamentalProperties<TaskElement> createForOrder( Scenario currentScenario, Order order, PlanningState planningState) { Adapter result = new Adapter(planningState); result.useScenario(currentScenario); result.setInitDate(asLocalDate(order.getInitDate())); result.setDeadline(asLocalDate(order.getDeadline())); return result; } private LocalDate asLocalDate(Date date) { return date != null ? LocalDate.fromDateFields(date) : null; } @Autowired private IAdHocTransactionService transactionService; private final ReentranceGuard reentranceGuard = new ReentranceGuard(); @Autowired private IOrderElementDAO orderElementDAO; @Autowired private ITaskElementDAO taskDAO; @Autowired private ICriterionDAO criterionDAO; @Autowired private IResourceAllocationDAO resourceAllocationDAO; @Autowired private IExternalCompanyDAO externalCompanyDAO; @Autowired private IResourcesSearcher searcher; @Autowired private IConfigurationDAO configurationDAO; @Autowired private IMoneyCostCalculator moneyCostCalculator; static class GanttDateAdapter extends CustomDate { private static final int DAY_MILLISECONDS = (int) Days.days(1) .toStandardDuration().getMillis(); private final IntraDayDate date; private final Duration workingDayDuration; GanttDateAdapter(IntraDayDate date, EffortDuration capacityForDay) { this.date = date; this.workingDayDuration = toMilliseconds(capacityForDay); } protected int compareToCustom(CustomDate customType) { if (customType instanceof GanttDateAdapter) { GanttDateAdapter other = (GanttDateAdapter) customType; return this.date.compareTo(other.date); } throw new RuntimeException("incompatible type: " + customType); } protected int compareToLocalDate(LocalDate localDate) { return this.date.compareTo(localDate); } public IntraDayDate getDate() { return date; } @Override public Date toDayRoundedDate() { return date.toDateTimeAtStartOfDay().toDate(); } @Override public LocalDate toLocalDate() { return date.getDate(); } @Override public LocalDate asExclusiveEnd() { return date.asExclusiveEnd(); } @Override protected boolean isEqualsToCustom(CustomDate customType) { if (customType instanceof GanttDateAdapter) { GanttDateAdapter other = (GanttDateAdapter) customType; return this.date.equals(other.date); } return false; } @Override public int hashCode() { return date.hashCode(); } @Override public int toPixels(IDatesMapper datesMapper) { int pixesUntilDate = datesMapper.toPixels(this.date.getDate()); 
EffortDuration effortDuration = date.getEffortDuration(); Duration durationInDay = calculateDurationInDayFor(effortDuration); int pixelsInsideDay = datesMapper.toPixels(durationInDay); return pixesUntilDate + pixelsInsideDay; } private Duration calculateDurationInDayFor(EffortDuration effortDuration) { if (workingDayDuration.getStandardSeconds() == 0) { return Duration.ZERO; } Fraction fraction = fractionOfWorkingDayFor(effortDuration); try { return new Duration(fraction.multiplyBy( Fraction.getFraction(DAY_MILLISECONDS, 1)).intValue()); } catch (ArithmeticException e) { // if fraction overflows use floating point arithmetic return new Duration( (int) (fraction.doubleValue() * DAY_MILLISECONDS)); } } @SuppressWarnings("unchecked") private Fraction fractionOfWorkingDayFor(EffortDuration effortDuration) { Duration durationInDay = toMilliseconds(effortDuration); // cast to int is safe because there are not enough seconds in // day // to overflow Fraction fraction = Fraction.getFraction( (int) durationInDay.getStandardSeconds(), (int) workingDayDuration.getStandardSeconds()); return (Fraction) Collections.min(Arrays.asList(fraction, Fraction.ONE)); } private static Duration toMilliseconds(EffortDuration duration) { return Seconds.seconds(duration.getSeconds()).toStandardDuration(); } } /** * Responsible of adaptating a {@link TaskElement} into a * {@link ITaskFundamentalProperties} <br /> * @author Óscar González Fernández <[email protected]> */ public class Adapter implements IAdapterToTaskFundamentalProperties<TaskElement> { private Scenario scenario; private LocalDate initDate; private LocalDate deadline; private boolean preventCalculateResourcesText = false; private final PlanningState planningState; private void useScenario(Scenario scenario) { this.scenario = scenario; } private void setInitDate(LocalDate initDate) { this.initDate = initDate; } private void setDeadline(LocalDate deadline) { this.deadline = deadline; } public boolean isPreventCalculateResourcesText() { return preventCalculateResourcesText; } public void setPreventCalculateResourcesText( boolean preventCalculateResourcesText) { this.preventCalculateResourcesText = preventCalculateResourcesText; } public Adapter() { this(null); } public Adapter(PlanningState planningState) { this.planningState = planningState; } private class TaskElementWrapper implements ITaskFundamentalProperties { private final TaskElement taskElement; private final Scenario currentScenario; protected TaskElementWrapper(Scenario currentScenario, TaskElement taskElement) { Validate.notNull(currentScenario); this.currentScenario = currentScenario; this.taskElement = taskElement; } private final IUpdatablePosition position = new IUpdatablePosition() { @Override public void setEndDate(GanttDate endDate) { stepsBeforePossibleReallocation(); getDatesHandler(taskElement).moveEndTo(toIntraDay(endDate)); } @Override public void setBeginDate(final GanttDate beginDate) { stepsBeforePossibleReallocation(); getDatesHandler(taskElement).moveTo(toIntraDay(beginDate)); } @Override public void resizeTo(final GanttDate endDate) { stepsBeforePossibleReallocation(); updateTaskPositionConstraint(endDate); getDatesHandler(taskElement).resizeTo(toIntraDay(endDate)); } private void stepsBeforePossibleReallocation() { taskDAO.reattach(taskElement); } @Override public void moveTo(GanttDate newStart) { if (taskElement instanceof ITaskPositionConstrained) { ITaskPositionConstrained task = (ITaskPositionConstrained) taskElement; GanttDate newEnd = inferEndFrom(newStart); if 
(task.getPositionConstraint() .isConstraintAppliedToStart()) { setBeginDate(newStart); } else { setEndDate(newEnd); } task.explicityMoved(toIntraDay(newStart), toIntraDay(newEnd)); } } }; @Override public void setName(String name) { taskElement.setName(name); } @Override public void setNotes(String notes) { taskElement.setNotes(notes); } @Override public String getName() { return taskElement.getName(); } @Override public String getCode() { return taskElement.getCode(); } @Override public String getProjectCode() { return taskElement.getProjectCode(); } @Override public String getNotes() { return taskElement.getNotes(); } @Override public GanttDate getBeginDate() { IntraDayDate start = taskElement.getIntraDayStartDate(); return toGantt(start); } private GanttDate toGantt(IntraDayDate date) { BaseCalendar calendar = taskElement.getCalendar(); if (calendar == null) { return TaskElementAdapter.toGantt(date); } return TaskElementAdapter.toGantt(date, calendar .getCapacityOn(PartialDay.wholeDay(date.getDate()))); } @Override public void doPositionModifications( final IModifications modifications) { reentranceGuard.entranceRequested(new IReentranceCases() { @Override public void ifNewEntrance() { transactionService.runOnReadOnlyTransaction(asTransaction(modifications)); } IOnTransaction<Void> asTransaction( final IModifications modifications) { return new IOnTransaction<Void>() { @Override public Void execute() { if (planningState != null) { planningState .reassociateResourcesWithSession(); } modifications.doIt(position); return null; } }; } @Override public void ifAlreadyInside() { modifications.doIt(position); } }); } @Override public GanttDate getEndDate() { return toGantt(taskElement.getIntraDayEndDate()); } IDatesHandler getDatesHandler(TaskElement taskElement) { return taskElement.getDatesHandler(currentScenario, searcher); } private void updateTaskPositionConstraint(GanttDate endDate) { if (taskElement instanceof ITaskPositionConstrained) { ITaskPositionConstrained task = (ITaskPositionConstrained) taskElement; PositionConstraintType constraintType = task .getPositionConstraint().getConstraintType(); if (constraintType .compareTo(PositionConstraintType.FINISH_NOT_LATER_THAN) == 0 || constraintType .compareTo(PositionConstraintType.AS_LATE_AS_POSSIBLE) == 0) { task.explicityMoved(taskElement.getIntraDayStartDate(), toIntraDay(endDate)); } } } @Override public GanttDate getHoursAdvanceBarEndDate() { return calculateLimitDateProportionalToTaskElementSize(getHoursAdvanceBarPercentage()); } @Override public BigDecimal getHoursAdvanceBarPercentage() { OrderElement orderElement = taskElement.getOrderElement(); if (orderElement == null) { return BigDecimal.ZERO; } EffortDuration totalChargedEffort = orderElement .getSumChargedEffort() != null ? 
orderElement .getSumChargedEffort().getTotalChargedEffort() : EffortDuration.zero(); EffortDuration estimatedEffort = taskElement.getSumOfAssignedEffort(); if(estimatedEffort.isZero()) { estimatedEffort = EffortDuration.hours(orderElement.getWorkHours()); if(estimatedEffort.isZero()) { return BigDecimal.ZERO; } } return new BigDecimal(totalChargedEffort.divivedBy( estimatedEffort).doubleValue()).setScale(2, RoundingMode.HALF_UP); } @Override public GanttDate getMoneyCostBarEndDate() { return calculateLimitDateProportionalToTaskElementSize(getMoneyCostBarPercentage()); } private GanttDate calculateLimitDateProportionalToTaskElementSize( BigDecimal proportion) { if (proportion.compareTo(BigDecimal.ZERO) == 0) { return getBeginDate(); } IntraDayDate start = taskElement.getIntraDayStartDate(); IntraDayDate end = taskElement.getIntraDayEndDate(); EffortDuration effortBetween = start.effortUntil(end); int seconds = new BigDecimal(effortBetween.getSeconds()) .multiply(proportion).toBigInteger().intValue(); return TaskElementAdapter.toGantt( start.addEffort(EffortDuration.seconds(seconds)), EffortDuration.hours(8)); } @Override public BigDecimal getMoneyCostBarPercentage() { return MoneyCostCalculator.getMoneyCostProportion( getMoneyCost(), getBudget()); } private BigDecimal getBudget() { if ((taskElement == null) || (taskElement.getOrderElement() == null)) { return BigDecimal.ZERO; } return taskElement.getOrderElement().getBudget(); } private BigDecimal getTotalCalculatedBudget() { if ((taskElement == null) || (taskElement.getOrderElement() == null)) { return BigDecimal.ZERO; } return transactionService .runOnReadOnlyTransaction(new IOnTransaction<BigDecimal>() { @Override public BigDecimal execute() { return taskElement.getOrderElement() .getTotalBudget(); } }); } private BigDecimal getTotalBudget() { if ((taskElement == null) || (taskElement.getOrderElement() == null)) { return BigDecimal.ZERO; } return taskElement.getOrderElement().getResourcesBudget(); } private BigDecimal getMoneyCost() { if ((taskElement == null) || (taskElement.getOrderElement() == null)) { return BigDecimal.ZERO; } return transactionService .runOnReadOnlyTransaction(new IOnTransaction<BigDecimal>() { @Override public BigDecimal execute() { return moneyCostCalculator.getTotalMoneyCost(taskElement .getOrderElement()); } }); } private BigDecimal getHoursMoneyCost() { if ((taskElement == null) || (taskElement.getOrderElement() == null)) { return BigDecimal.ZERO; } return transactionService .runOnReadOnlyTransaction(new IOnTransaction<BigDecimal>() { @Override public BigDecimal execute() { return moneyCostCalculator.getHoursMoneyCost(taskElement.getOrderElement()); } }); } private BigDecimal getExpensesMoneyCost() { if ((taskElement == null) || (taskElement.getOrderElement() == null)) { return BigDecimal.ZERO; } return transactionService .runOnReadOnlyTransaction(new IOnTransaction<BigDecimal>() { @Override public BigDecimal execute() { return moneyCostCalculator.getExpensesMoneyCost(taskElement .getOrderElement()); } }); } @Override public GanttDate getAdvanceBarEndDate(String progressType) { return getAdvanceBarEndDate(ProgressType.asEnum(progressType)); } private GanttDate getAdvanceBarEndDate(ProgressType progressType) { BigDecimal advancePercentage = BigDecimal.ZERO; if (taskElement.getOrderElement() != null) { advancePercentage = taskElement .getAdvancePercentage(progressType); } return getAdvanceBarEndDate(advancePercentage); } @Override public GanttDate getAdvanceBarEndDate() { return 
getAdvanceBarEndDate(getAdvancePercentage()); } private boolean isTaskRoot(TaskElement taskElement) { return taskElement instanceof TaskGroup && taskElement.getParent() == null; } private ProgressType getProgressTypeFromConfiguration() { return transactionService .runOnReadOnlyTransaction(new IOnTransaction<ProgressType>() { @Override public ProgressType execute() { return configurationDAO.getConfiguration() .getProgressType(); } }); } private GanttDate getAdvanceBarEndDate(BigDecimal advancePercentage) { return calculateLimitDateProportionalToTaskElementSize(advancePercentage); } @Override public String getTooltipText() { if (taskElement.isMilestone() || taskElement.getOrderElement() == null) { return ""; } return transactionService .runOnReadOnlyTransaction(new IOnTransaction<String>() { @Override public String execute() { orderElementDAO.reattach(taskElement .getOrderElement()); return buildTooltipText(); } }); } @Override public String getLabelsText() { if (taskElement.isMilestone() || taskElement.getOrderElement() == null) { return ""; } return transactionService .runOnReadOnlyTransaction(new IOnTransaction<String>() { @Override public String execute() { orderElementDAO.reattach(taskElement .getOrderElement()); return buildLabelsText(); } }); } @Override public String getResourcesText() { if (isPreventCalculateResourcesText() || taskElement.getOrderElement() == null) { return ""; } try { return transactionService .runOnAnotherReadOnlyTransaction(new IOnTransaction<String>() { @Override public String execute() { orderElementDAO.reattach(taskElement .getOrderElement()); if (taskElement.isSubcontracted()) { externalCompanyDAO.reattach(taskElement .getSubcontractedCompany()); } return buildResourcesText(); } }); } catch (Exception e) { LOG.error("error calculating resources text", e); return ""; } } private Set<Label> getLabelsFromElementAndPredecesors( OrderElement order) { if (order != null) { if (order.getParent() == null) { return order.getLabels(); } else { HashSet<Label> labels = new HashSet<Label>( order.getLabels()); labels.addAll(getLabelsFromElementAndPredecesors(order .getParent())); return labels; } } return new HashSet<Label>(); } private String buildLabelsText() { List<String> result = new ArrayList<String>(); if (taskElement.getOrderElement() != null) { Set<Label> labels = getLabelsFromElementAndPredecesors(taskElement .getOrderElement()); for (Label label : labels) { String representation = label.getName(); if (!result.contains(representation)) { result.add(representation); } } } Collections.sort(result); return StringUtils.join(result, ", "); } private String buildResourcesText() { List<String> result = new ArrayList<String>(); for (ResourceAllocation<?> each : taskElement .getSatisfiedResourceAllocations()) { if (each instanceof SpecificResourceAllocation) { for (Resource r : each.getAssociatedResources()) { String representation = r.getName(); if (!result.contains(representation)) { result.add(representation); } } } else { String representation = extractRepresentationForGeneric((GenericResourceAllocation) each); if (!result.contains(representation)) { result.add(representation); } } } if (taskElement.isSubcontracted()) { result.add(taskElement.getSubcontractionName()); } Collections.sort(result); return StringUtils.join(result, "; "); } private String extractRepresentationForGeneric( GenericResourceAllocation generic) { if (!generic.isNewObject()) { resourceAllocationDAO.reattach(generic); } Set<Criterion> criterions = generic.getCriterions(); List<String> 
forCriterionRepresentations = new ArrayList<String>(); if (!criterions.isEmpty()) { for (Criterion c : criterions) { criterionDAO.reattachUnmodifiedEntity(c); forCriterionRepresentations.add(c.getName()); } } else { forCriterionRepresentations.add((_("All workers"))); } return "[" + StringUtils.join(forCriterionRepresentations, ", ") + "]"; } @Override public String updateTooltipText() { return buildTooltipText(); } @Override public String updateTooltipText(String progressType) { return buildTooltipText(ProgressType.asEnum(progressType)); } @Override public BigDecimal getAdvancePercentage() { if (taskElement != null) { BigDecimal advancePercentage; if (isTaskRoot(taskElement)) { ProgressType progressType = getProgressTypeFromConfiguration(); advancePercentage = taskElement .getAdvancePercentage(progressType); } else { advancePercentage = taskElement.getAdvancePercentage(); } return advancePercentage; } return new BigDecimal(0); } private String buildTooltipText() { return buildTooltipText(asPercentage(getAdvancePercentage())); } private BigDecimal asPercentage(BigDecimal value) { return value.multiply(BigDecimal.valueOf(100)).setScale(2, RoundingMode.DOWN); } private String buildTooltipText(BigDecimal progressPercentage) { StringBuilder result = new StringBuilder(); result.append("<strong>" + getName() + "</strong><br/>"); result.append(_("Progress") + ": ").append(progressPercentage) .append("% , "); result.append(_("Hours invested") + ": ") .append(getHoursAdvanceBarPercentage().multiply( new BigDecimal(100))).append("% <br/>"); if (taskElement.getOrderElement() instanceof Order) { result.append(_("State") + ": ").append(getOrderState()); } else { String budget = addCurrencySymbol(getTotalCalculatedBudget()); String moneyCost = addCurrencySymbol(getMoneyCost()); String costHours = addCurrencySymbol(getHoursMoneyCost()); String costExpenses = addCurrencySymbol(getExpensesMoneyCost()); result.append( _("Budget: {0}, Consumed: {1} ({2}%)", budget, moneyCost, getMoneyCostBarPercentage().multiply(new BigDecimal(100)))) .append("<br/>"); result.append( _( "Hours cost: {0}, Expenses cost: {1}", costHours, costExpenses)); } String labels = buildLabelsText(); if (!labels.equals("")) { result.append("<div class='tooltip-labels'>" + _("Labels") + ": " + labels + "</div>"); } return result.toString(); } private String buildTooltipText(ProgressType progressType) { return buildTooltipText(asPercentage(taskElement .getAdvancePercentage(progressType))); } private String getOrderState() { String cssClass; OrderStatusEnum state = taskElement.getOrderElement() .getOrder().getState(); if (Arrays.asList(OrderStatusEnum.ACCEPTED, OrderStatusEnum.OFFERED, OrderStatusEnum.STARTED, OrderStatusEnum.OUTSOURCED).contains( state)) { if (taskElement.getAssignedStatus() == "assigned") { cssClass = "order-open-assigned"; } else { cssClass = "order-open-unassigned"; } } else { cssClass = "order-closed"; } return "<font class='" + cssClass + "'>" + _(state.toString()) + "</font>"; } @Override public List<Constraint<GanttDate>> getStartConstraints() { return getStartConstraintsFor(this.taskElement, initDate); } @Override public List<Constraint<GanttDate>> getEndConstraints() { return getEndConstraintsFor(this.taskElement, deadline); } @Override public List<Constraint<GanttDate>> getCurrentLengthConstraint() { if (taskElement instanceof Task) { Task task = (Task) taskElement; if (task.getAllocationDirection() == Direction.FORWARD) { return Collections .singletonList(biggerOrEqualThan(getEndDate())); } } return 
Collections.emptyList(); } private GanttDate inferEndFrom(GanttDate newStart) { if (taskElement instanceof Task) { Task task = (Task) taskElement; return toGantt(task .calculateEndKeepingLength(toIntraDay(newStart))); } return newStart; } @Override public Date getDeadline() { LocalDate deadline = taskElement.getDeadline(); if (deadline == null) { return null; } return deadline.toDateTimeAtStartOfDay().toDate(); } @Override public void setDeadline(Date date) { if (date != null) { taskElement.setDeadline(LocalDate.fromDateFields(date)); } else { taskElement.setDeadline(null); } } @Override public GanttDate getConsolidatedline() { if (!taskElement.isLeaf() || !taskElement.hasConsolidations()) { return null; } LocalDate consolidatedline = ((Task) taskElement) .getFirstDayNotConsolidated().getDate(); return TaskElementAdapter.toGantt(consolidatedline); } @Override public boolean isSubcontracted() { return taskElement.isSubcontracted(); } @Override public boolean isLimiting() { return taskElement.isLimiting(); } @Override public boolean isLimitingAndHasDayAssignments() { return taskElement.isLimitingAndHasDayAssignments(); } public boolean hasConsolidations() { return taskElement.hasConsolidations(); } @Override public boolean canBeExplicitlyResized() { return taskElement.canBeExplicitlyResized(); } @Override public String getAssignedStatus() { return taskElement.getAssignedStatus(); } @Override public boolean isFixed() { return taskElement.isLimitingAndHasDayAssignments() || taskElement.hasConsolidations() || taskElement.isUpdatedFromTimesheets(); } @Override public boolean isManualAnyAllocation() { return taskElement.isTask() && ((Task) taskElement).isManualAnyAllocation(); } @Override public boolean belongsClosedProject() { return taskElement.belongsClosedProject(); } @Override public boolean isRoot() { return taskElement.isRoot(); } @Override public boolean isUpdatedFromTimesheets() { return taskElement.isUpdatedFromTimesheets(); } @Override public Date getFirstTimesheetDate() { OrderElement orderElement = taskElement.getOrderElement(); if (orderElement != null) { return orderElement.getFirstTimesheetDate(); } return null; } @Override public Date getLastTimesheetDate() { OrderElement orderElement = taskElement.getOrderElement(); if (orderElement != null) { return orderElement.getLastTimesheetDate(); } return null; } @Override public ProjectStatusEnum getProjectHoursStatus() { if (taskElement.isTask()) { return getProjectHourStatus(taskElement.getOrderElement()); } List<TaskElement> taskElements = taskElement.getAllChildren(); ProjectStatusEnum status = ProjectStatusEnum.AS_PLANNED; ProjectStatusEnum highestStatus = null; for (TaskElement taskElement : taskElements) { if (!taskElement.isTask()) { continue; } status = getProjectHourStatus(taskElement.getOrderElement()); if (status == ProjectStatusEnum.MARGIN_EXCEEDED) { highestStatus = ProjectStatusEnum.MARGIN_EXCEEDED; break; } if (status == ProjectStatusEnum.WITHIN_MARGIN) { highestStatus = ProjectStatusEnum.WITHIN_MARGIN; } } if (highestStatus != null) { status = highestStatus; } return status; } /** * Returns {@link ProjectStatusEnum} for the specified * <code>orderElement</code> * * @param orderElement */ private ProjectStatusEnum getProjectHourStatus(OrderElement orderElement) { EffortDuration sumChargedEffort = getSumChargedEffort(orderElement); EffortDuration estimatedEffort = getEstimatedEffort(orderElement); if (sumChargedEffort.isZero() || sumChargedEffort.compareTo(estimatedEffort) <= 0) { return ProjectStatusEnum.AS_PLANNED; } 
EffortDuration withMarginEstimatedHours = orderElement .getWithMarginCalculatedHours(); if (estimatedEffort.compareTo(sumChargedEffort) < 0 && sumChargedEffort.compareTo(withMarginEstimatedHours) <= 0) { return ProjectStatusEnum.WITHIN_MARGIN; } return ProjectStatusEnum.MARGIN_EXCEEDED; } /** * Returns sum charged effort for the specified * <code>orderElement</code> * * @param orderElement */ private EffortDuration getSumChargedEffort(OrderElement orderElement) { SumChargedEffort sumChargedEffort = orderElement .getSumChargedEffort(); EffortDuration totalChargedEffort = sumChargedEffort != null ? sumChargedEffort .getTotalChargedEffort() : EffortDuration.zero(); return totalChargedEffort; } /** * Returns the estimated effort for the specified * <code>orderElement</code> * * @param orderElement */ private EffortDuration getEstimatedEffort(OrderElement orderElement) { return EffortDuration.fromHoursAsBigDecimal(new BigDecimal( orderElement.getWorkHours()).setScale(2)); } @Override public ProjectStatusEnum getProjectBudgetStatus() { if (taskElement.isTask()) { return getProjectBudgetStatus(taskElement.getOrderElement()); } List<TaskElement> taskElements = taskElement.getAllChildren(); ProjectStatusEnum status = ProjectStatusEnum.AS_PLANNED; ProjectStatusEnum highestStatus = null; for (TaskElement taskElement : taskElements) { if (!taskElement.isTask()) { continue; } status = getProjectBudgetStatus(taskElement .getOrderElement()); if (status == ProjectStatusEnum.MARGIN_EXCEEDED) { highestStatus = ProjectStatusEnum.MARGIN_EXCEEDED; break; } if (status == ProjectStatusEnum.WITHIN_MARGIN) { highestStatus = ProjectStatusEnum.WITHIN_MARGIN; } } if (highestStatus != null) { status = highestStatus; } return status; } /** * Returns {@link ProjectStatusEnum} for the specified * <code>orderElement</code> * * @param orderElement */ private ProjectStatusEnum getProjectBudgetStatus( OrderElement orderElement) { BigDecimal budget = orderElement.getBudget(); BigDecimal totalExpense = getTotalExpense(orderElement); BigDecimal withMarginCalculatedBudget = orderElement .getWithMarginCalculatedBudget(); if (totalExpense.compareTo(budget) <= 0) { return ProjectStatusEnum.AS_PLANNED; } if (budget.compareTo(totalExpense) < 0 && totalExpense.compareTo(withMarginCalculatedBudget) <= 0) { return ProjectStatusEnum.WITHIN_MARGIN; } return ProjectStatusEnum.MARGIN_EXCEEDED; } /** * Returns total expense for the specified <code>orderElement</code> * * @param orderElement */ public BigDecimal getTotalExpense(OrderElement orderElement) { BigDecimal total = BigDecimal.ZERO; SumExpenses sumExpenses = orderElement.getSumExpenses(); if (sumExpenses != null) { BigDecimal directExpenes = sumExpenses .getTotalDirectExpenses(); BigDecimal indirectExpense = sumExpenses .getTotalIndirectExpenses(); if (directExpenes != null) { total = total.add(directExpenes); } if (indirectExpense != null) { total = total.add(indirectExpense); } } return total; } @Override public String getTooltipTextForProjectHoursStatus() { if (taskElement.isTask()) { return buildHoursTooltipText(taskElement.getOrderElement()); } return null; } @Override public String getTooltipTextForProjectBudgetStatus() { if (taskElement.isTask()) { return buildBudgetTooltipText(taskElement.getOrderElement()); } return null; } /** * Builds hours tooltiptext for the specified * <code>orderElement</code> * * @param orderElement */ private String buildHoursTooltipText(OrderElement orderElement) { StringBuilder result = new StringBuilder(); Integer margin = 
orderElement.getOrder().getHoursMargin() != null ? orderElement .getOrder().getHoursMargin() : 0; result.append(_("Hours-status") + "\n"); result.append(_("Project margin: {0}% ({1} hours)={2} hours", margin, orderElement.getWorkHours(), orderElement.getWithMarginCalculatedHours())); String totalEffortHours = orderElement.getEffortAsString(); result.append(_(". Already registered: {0} hours", totalEffortHours)); return result.toString(); } private String buildBudgetTooltipText(OrderElement orderElement) { StringBuilder result = new StringBuilder(); Integer margin = orderElement.getOrder().getBudgetMargin() != null ? orderElement .getOrder().getBudgetMargin() : 0; result.append(_("Budget-status") + "\n"); result.append(_("Project margin: {0}% ({1})={2}", margin, addCurrencySymbol(orderElement.getBudget()), addCurrencySymbol(orderElement .getWithMarginCalculatedBudget()))); BigDecimal totalExpense = getTotalExpense(orderElement); result.append(_(". Already spent: {0}", addCurrencySymbol(totalExpense))); return result.toString(); } } @Override public ITaskFundamentalProperties adapt(final TaskElement taskElement) { return new TaskElementWrapper(scenario, taskElement); } @Override public List<DomainDependency<TaskElement>> getIncomingDependencies( TaskElement taskElement) { return toDomainDependencies(taskElement .getDependenciesWithThisDestination()); } @Override public List<DomainDependency<TaskElement>> getOutcomingDependencies( TaskElement taskElement) { return toDomainDependencies(taskElement .getDependenciesWithThisOrigin()); } private List<DomainDependency<TaskElement>> toDomainDependencies( Collection<? extends Dependency> dependencies) { List<DomainDependency<TaskElement>> result = new ArrayList<DomainDependency<TaskElement>>(); for (Dependency dependency : dependencies) { result.add(DomainDependency.createDependency( dependency.getOrigin(), dependency.getDestination(), toGanntType(dependency.getType()))); } return result; } private DependencyType toGanntType(Type type) { switch (type) { case END_START: return DependencyType.END_START; case START_END: return DependencyType.START_END; case START_START: return DependencyType.START_START; case END_END: return DependencyType.END_END; default: throw new RuntimeException(_("{0} not supported yet", type)); } } private Type toDomainType(DependencyType type) { switch (type) { case END_START: return Type.END_START; case START_END: return Type.START_END; case START_START: return Type.START_START; case END_END: return Type.END_END; default: throw new RuntimeException(_("{0} not supported yet", type)); } } @Override public void addDependency(DomainDependency<TaskElement> dependency) { TaskElement source = dependency.getSource(); TaskElement destination = dependency.getDestination(); Type domainType = toDomainType(dependency.getType()); Dependency.create(source, destination, domainType); } @Override public boolean canAddDependency(DomainDependency<TaskElement> dependency) { return true; } @Override public void removeDependency(DomainDependency<TaskElement> dependency) { TaskElement source = dependency.getSource(); Type type = toDomainType(dependency.getType()); source.removeDependencyWithDestination(dependency.getDestination(), type); } @Override public void doRemovalOf(TaskElement taskElement) { taskElement.detach(); TaskGroup parent = taskElement.getParent(); if (parent != null) { parent.remove(taskElement); } } } }
Remove unused method
libreplan-webapp/src/main/java/org/libreplan/web/planner/TaskElementAdapter.java
Remove unused method
<ide><path>ibreplan-webapp/src/main/java/org/libreplan/web/planner/TaskElementAdapter.java
<ide>                         });
<ide>             }
<ide>
<del>            private BigDecimal getTotalBudget() {
<del>                if ((taskElement == null)
<del>                        || (taskElement.getOrderElement() == null)) {
<del>                    return BigDecimal.ZERO;
<del>                }
<del>                return taskElement.getOrderElement().getResourcesBudget();
<del>            }
<del>
<ide>            private BigDecimal getMoneyCost() {
<ide>                if ((taskElement == null)
<ide>                        || (taskElement.getOrderElement() == null)) {
Java
apache-2.0
63a82dd6dba940ef08ad0309d3353955799f0574
0
caiocteodoro/nfe,fincatto/nfe,eldevanjr/nfe,danieldhp/nfe,wmixvideo/nfe
package com.fincatto.documentofiscal.nfe400.classes.nota.consulta; import java.util.List; import java.time.LocalDateTime; import org.simpleframework.xml.*; import com.fincatto.documentofiscal.DFAmbiente; import com.fincatto.documentofiscal.DFBase; import com.fincatto.documentofiscal.DFUnidadeFederativa; import com.fincatto.documentofiscal.nfe400.classes.NFProtocolo; import com.fincatto.documentofiscal.nfe400.classes.evento.cancelamento.NFRetornoCancelamento; @Root(name = "retConsSitNFe", strict = false) @Namespace(reference = "http://www.portalfiscal.inf.br/nfe") public class NFNotaConsultaRetorno extends DFBase { private static final long serialVersionUID = -5747228973124291025L; @Attribute(name = "versao", required = false) private String versao; @Element(name = "tpAmb", required = true) private DFAmbiente ambiente; @Element(name = "verAplic", required = true) private String versaoAplicacao; @Element(name = "cStat", required = true) private String status; @Element(name = "xMotivo", required = true) private String motivo; @Element(name = "cUF", required = true) private DFUnidadeFederativa uf; @Element(name = "dhRecbto", required = false) private LocalDateTime dataHoraRecibo; @Element(name = "chNFe", required = true) private String chave; @Element(name = "protNFe", required = false) protected NFProtocolo protocolo; @Element(name = "retCancNFe", required = false) private NFRetornoCancelamento protocoloCancelamento; @ElementList(entry = "procEventoNFe", inline = true, required = false) private List<NFProtocoloEvento> protocoloEvento; public NFNotaConsultaRetorno() { this.versao = null; this.ambiente = null; this.versaoAplicacao = null; this.status = null; this.motivo = null; this.uf = null; this.chave = null; this.protocolo = null; } public String getVersao() { return this.versao; } public void setVersao(final String versao) { this.versao = versao; } public DFAmbiente getAmbiente() { return this.ambiente; } public void setAmbiente(final DFAmbiente ambiente) { this.ambiente = ambiente; } public String getVersaoAplicacao() { return this.versaoAplicacao; } public void setVersaoAplicacao(final String versaoAplicacao) { this.versaoAplicacao = versaoAplicacao; } public String getStatus() { return this.status; } public void setStatus(final String status) { this.status = status; } public String getMotivo() { return this.motivo; } public void setMotivo(final String motivo) { this.motivo = motivo; } public DFUnidadeFederativa getUf() { return this.uf; } public void setUf(final DFUnidadeFederativa uf) { this.uf = uf; } public String getChave() { return this.chave; } public List<NFProtocoloEvento> getProtocoloEvento() { return this.protocoloEvento; } public NFRetornoCancelamento getProtocoloCancelamento() { return this.protocoloCancelamento; } public void setChave(final String chave) { this.chave = chave; } public NFProtocolo getProtocolo() { return this.protocolo; } public void setProtocolo(final NFProtocolo protocolo) { this.protocolo = protocolo; } public LocalDateTime getDataHoraRecibo() { return this.dataHoraRecibo; } public void setDataHoraRecibo(final LocalDateTime dataHoraRecibo) { this.dataHoraRecibo = dataHoraRecibo; } public void setProtocoloEvento(final List<NFProtocoloEvento> protocoloEvento) { this.protocoloEvento = protocoloEvento; } public void setProtocoloCancelamento(final NFRetornoCancelamento protocoloCancelamento) { this.protocoloCancelamento = protocoloCancelamento; } }
src/main/java/com/fincatto/documentofiscal/nfe400/classes/nota/consulta/NFNotaConsultaRetorno.java
package com.fincatto.documentofiscal.nfe400.classes.nota.consulta; import java.util.List; import java.time.LocalDateTime; import org.simpleframework.xml.*; import com.fincatto.documentofiscal.DFAmbiente; import com.fincatto.documentofiscal.DFBase; import com.fincatto.documentofiscal.DFUnidadeFederativa; import com.fincatto.documentofiscal.nfe400.classes.NFProtocolo; import com.fincatto.documentofiscal.nfe400.classes.evento.cancelamento.NFRetornoCancelamento; @Root(name = "retConsSitNFe", strict = false) @Namespace(reference = "http://www.portalfiscal.inf.br/nfe") public class NFNotaConsultaRetorno extends DFBase { private static final long serialVersionUID = -5747228973124291025L; @Attribute(name = "versao", required = true) private String versao; @Element(name = "tpAmb", required = true) private DFAmbiente ambiente; @Element(name = "verAplic", required = true) private String versaoAplicacao; @Element(name = "cStat", required = true) private String status; @Element(name = "xMotivo", required = true) private String motivo; @Element(name = "cUF", required = true) private DFUnidadeFederativa uf; @Element(name = "dhRecbto", required = false) private LocalDateTime dataHoraRecibo; @Element(name = "chNFe", required = true) private String chave; @Element(name = "protNFe", required = false) protected NFProtocolo protocolo; @Element(name = "retCancNFe", required = false) private NFRetornoCancelamento protocoloCancelamento; @ElementList(entry = "procEventoNFe", inline = true, required = false) private List<NFProtocoloEvento> protocoloEvento; public NFNotaConsultaRetorno() { this.versao = null; this.ambiente = null; this.versaoAplicacao = null; this.status = null; this.motivo = null; this.uf = null; this.chave = null; this.protocolo = null; } public String getVersao() { return this.versao; } public void setVersao(final String versao) { this.versao = versao; } public DFAmbiente getAmbiente() { return this.ambiente; } public void setAmbiente(final DFAmbiente ambiente) { this.ambiente = ambiente; } public String getVersaoAplicacao() { return this.versaoAplicacao; } public void setVersaoAplicacao(final String versaoAplicacao) { this.versaoAplicacao = versaoAplicacao; } public String getStatus() { return this.status; } public void setStatus(final String status) { this.status = status; } public String getMotivo() { return this.motivo; } public void setMotivo(final String motivo) { this.motivo = motivo; } public DFUnidadeFederativa getUf() { return this.uf; } public void setUf(final DFUnidadeFederativa uf) { this.uf = uf; } public String getChave() { return this.chave; } public List<NFProtocoloEvento> getProtocoloEvento() { return this.protocoloEvento; } public NFRetornoCancelamento getProtocoloCancelamento() { return this.protocoloCancelamento; } public void setChave(final String chave) { this.chave = chave; } public NFProtocolo getProtocolo() { return this.protocolo; } public void setProtocolo(final NFProtocolo protocolo) { this.protocolo = protocolo; } public LocalDateTime getDataHoraRecibo() { return this.dataHoraRecibo; } public void setDataHoraRecibo(final LocalDateTime dataHoraRecibo) { this.dataHoraRecibo = dataHoraRecibo; } public void setProtocoloEvento(final List<NFProtocoloEvento> protocoloEvento) { this.protocoloEvento = protocoloEvento; } public void setProtocoloCancelamento(final NFRetornoCancelamento protocoloCancelamento) { this.protocoloCancelamento = protocoloCancelamento; } }
when there is improper consumption (consumo indevido) at the Paraná SEFAZ, the versao field is not returned.
src/main/java/com/fincatto/documentofiscal/nfe400/classes/nota/consulta/NFNotaConsultaRetorno.java
when there is improper consumption (consumo indevido) at the Paraná SEFAZ, the versao field is not returned.
<ide><path>rc/main/java/com/fincatto/documentofiscal/nfe400/classes/nota/consulta/NFNotaConsultaRetorno.java
<ide> public class NFNotaConsultaRetorno extends DFBase {
<ide>     private static final long serialVersionUID = -5747228973124291025L;
<ide>
<del>    @Attribute(name = "versao", required = true)
<add>    @Attribute(name = "versao", required = false)
<ide>     private String versao;
<ide>
<ide>     @Element(name = "tpAmb", required = true)
Java
apache-2.0
5c1118ba26628c33d6f568cc61d39b6bb31b60ff
0
kwmt/GitHubSearch
package net.kwmt27.codesearch.entity.events;

import com.google.gson.annotations.SerializedName;

import net.kwmt27.codesearch.entity.EventEntity;
import net.kwmt27.codesearch.entity.payloads.StatusEntity;

/**
 * https://developer.github.com/v3/activity/events/types/#statusevent
 *
 * @deprecated Events of this type are not visible in timelines. These events are only used to trigger hooks.
 */
@Deprecated
public class StatusEvent extends EventEntity {

    @SerializedName("payload")
    private StatusEntity mStatusEntity;
}
app/src/main/java/net/kwmt27/codesearch/entity/events/StatusEvent.java
package net.kwmt27.codesearch.entity.events;

import com.google.gson.annotations.SerializedName;

import net.kwmt27.codesearch.entity.EventEntity;
import net.kwmt27.codesearch.entity.payloads.StatusEntity;

public class StatusEvent extends EventEntity {

    @SerializedName("payload")
    private StatusEntity mStatusEntity;

//    @Override
//    public void action(TextView view, ClickableSpan clickableSpan) {
//        String repoName = getRepo().getName();
//        String action = "status " + repoName; // TODO
//        view.setText(action);
//        TextViewUtil.addLink(view, repoName, clickableSpan);
//    }
}
support status event
app/src/main/java/net/kwmt27/codesearch/entity/events/StatusEvent.java
support status event
<ide><path>pp/src/main/java/net/kwmt27/codesearch/entity/events/StatusEvent.java
<ide> import net.kwmt27.codesearch.entity.EventEntity;
<ide> import net.kwmt27.codesearch.entity.payloads.StatusEntity;
<ide>
<add>/**
<add> * https://developer.github.com/v3/activity/events/types/#statusevent
<add> *
<add> * @deprecated Events of this type are not visible in timelines. These events are only used to trigger hooks.
<add> */
<add>@Deprecated
<ide> public class StatusEvent extends EventEntity {
<ide>
<ide>     @SerializedName("payload")
<ide>     private StatusEntity mStatusEntity;
<ide>
<del>//    @Override
<del>//    public void action(TextView view, ClickableSpan clickableSpan) {
<del>//        String repoName = getRepo().getName();
<del>//        String action = "status " + repoName; // TODO
<del>//        view.setText(action);
<del>//        TextViewUtil.addLink(view, repoName, clickableSpan);
<del>//    }
<ide>
<ide> }
Java
mit
error: pathspec 'src/main/java/java/util/function/Consumer.java' did not match any file(s) known to git
5614b3154ee03e3a116654a06c112912146d5ff5
1
dominikschreiber/underscore.java
package java.util.function;

/**
 * <p>Represents an operation that accepts a single input argument and returns no result.
 * Unlike most other functional interfaces, {@code Consumer} is expected to operate
 * via side-effects.</p>
 * <p>This is a functional interface whose functional method is {@link #accept(Object)}</p>
 * @param <T> the type of the input to the operation
 * @see <a href="http://docs.oracle.com/javase/8/docs/api/java/util/function/Consumer.html">Java 8 Consumer</a>
 */
public interface Consumer<T> {
    /**
     * <p>Performs this operation on the given argument.</p>
     * @param t the input argument
     */
    public void accept(T t);

    // not implementable pre Java 8:

    // public default Consumer<T> andThen(Consumer<? super T> after);
}
src/main/java/java/util/function/Consumer.java
added Consumer<T> to replace Function<T, Void>
src/main/java/java/util/function/Consumer.java
added Consumer<T> to replace Function<T, Void>
<ide><path>rc/main/java/java/util/function/Consumer.java
<add>package java.util.function;
<add>
<add>/**
<add> * <p>Represents an operation that accepts a single input argument and returns no result.
<add> * Unlike most other functional interfaces, {@code Consumer} is expected to operate
<add> * via side-effects.</p>
<add> * <p>This is a functional interface whose functional method is {@link #accept(Object)}</p>
<add> * @param <T> the type of the input to the operation
<add> * @see <a href="http://docs.oracle.com/javase/8/docs/api/java/util/function/Consumer.html">Java 8 Consumer</a>
<add> */
<add>public interface Consumer<T> {
<add>    /**
<add>     * <p>Performs this operation on the given argument.</p>
<add>     * @param t the input argument
<add>     */
<add>    public void accept(T t);
<add>
<add>    // not implementable pre Java 8:
<add>
<add>    // public default Consumer<T> andThen(Consumer<? super T> after);
<add>}
JavaScript
bsd-3-clause
error: pathspec 'harvardcards/static/js/views/DeckCreateModal.js' did not match any file(s) known to git
ac2f962011a0e008a4e0fe031fa63e0f874f0cc2
1
Harvard-ATG/HarvardCards,Harvard-ATG/HarvardCards,Harvard-ATG/HarvardCards,Harvard-ATG/HarvardCards
define(['jquery', 'jqueryui'], function($, $ui) {

    var DeckCreateModal = function(options) {
        this.options = options;
        this.btnSelector = this.options.btnSelector;
        this.dialogSelector = this.options.dialogSelector;
        this.form_name = this.options.form_name;
    };

    $.extend(DeckCreateModal.prototype, {
        init: function() {
            this.modalHandler = $.proxy(this.modalHandler, this);

            $(this.btnSelector).click(this.modalHandler);
        },
        modalHandler: function(evt) {
            $(this.dialogSelector).dialog({
                modal: true,
                width: '60%',
                position: { my: "top", at: "top+20px", of: window },
                closeOnEscape: true,
                buttons:[{
                    click: $.noop,
                    text: "Create Deck",
                    type: "Submit",
                    form: this.form_name
                },
                {
                    click: function() {
                        $(this).dialog("close");
                    },
                    text: "Close"
                }],
                open: function(event, ui) {
                    $( this ).find( "[type=submit]" ).hide();
                    //console.log("event: open", event, ui);
                }
            });
            return false;
        }
    });

    return DeckCreateModal;
});
harvardcards/static/js/views/DeckCreateModal.js
overlay add deck form works
harvardcards/static/js/views/DeckCreateModal.js
overlay add deck form works
<ide><path>arvardcards/static/js/views/DeckCreateModal.js
<add>define(['jquery', 'jqueryui'], function($, $ui) {
<add>
<add>    var DeckCreateModal = function(options) {
<add>        this.options = options;
<add>        this.btnSelector = this.options.btnSelector;
<add>        this.dialogSelector = this.options.dialogSelector;
<add>        this.form_name = this.options.form_name;
<add>    };
<add>
<add>    $.extend(DeckCreateModal.prototype, {
<add>        init: function() {
<add>            this.modalHandler = $.proxy(this.modalHandler, this);
<add>
<add>            $(this.btnSelector).click(this.modalHandler);
<add>        },
<add>        modalHandler: function(evt) {
<add>            $(this.dialogSelector).dialog({
<add>                modal: true,
<add>                width: '60%',
<add>                position: { my: "top", at: "top+20px", of: window },
<add>                closeOnEscape: true,
<add>                buttons:[{
<add>                    click: $.noop,
<add>                    text: "Create Deck",
<add>                    type: "Submit",
<add>                    form: this.form_name
<add>                },
<add>                {
<add>                    click: function() {
<add>                        $(this).dialog("close");
<add>                    },
<add>                    text: "Close"
<add>                }],
<add>                open: function(event, ui) {
<add>                    $( this ).find( "[type=submit]" ).hide();
<add>                    //console.log("event: open", event, ui);
<add>                }
<add>            });
<add>            return false;
<add>        }
<add>    });
<add>
<add>    return DeckCreateModal;
<add>});
JavaScript
apache-2.0
c6c87e137a15864aaf438c86d13cbae2d033bb99
0
rust-lang/crates.io,rust-lang/crates.io,rust-lang/crates.io,rust-lang/crates.io
import Component from '@ember/component';
import { alias } from '@ember/object/computed';
import { computed } from '@ember/object';

import RlDropdownContainer from './rl-dropdown-container';

export default Component.extend({
  classNames: ['rl-dropdown'],
  classNameBindings: ['isExpanded:open'],

  dropdownContainer: computed(function() {
    return this.nearestOfType(RlDropdownContainer);
  }),

  isExpanded: alias('dropdownContainer.dropdownExpanded'),

  click(event) {
    let closeOnChildClick = 'a:link';
    let $target = event.target;
    let $c = this.element;

    if ($target === $c) {
      return;
    }

    if ($target.closest(closeOnChildClick, $c).length) {
      this.set('isExpanded', false);
    }
  },
});
app/components/rl-dropdown.js
import Component from '@ember/component'; import { alias } from '@ember/object/computed'; import { computed } from '@ember/object'; import $ from 'jquery'; import RlDropdownContainer from './rl-dropdown-container'; export default Component.extend({ classNames: ['rl-dropdown'], classNameBindings: ['isExpanded:open'], dropdownContainer: computed(function() { return this.nearestOfType(RlDropdownContainer); }), isExpanded: alias('dropdownContainer.dropdownExpanded'), click(event) { let closeOnChildClick = 'a:link'; let $target = event.target; let $c = this.element; if ($target === $c) { return; } if ($target.closest(closeOnChildClick, $c).length) { this.set('isExpanded', false); } }, });
remove unused import
app/components/rl-dropdown.js
remove unused import
<ide><path>pp/components/rl-dropdown.js <ide> import Component from '@ember/component'; <ide> import { alias } from '@ember/object/computed'; <ide> import { computed } from '@ember/object'; <del>import $ from 'jquery'; <ide> <ide> import RlDropdownContainer from './rl-dropdown-container'; <ide>
Java
bsd-3-clause
error: pathspec 'api/src/gov/nih/nci/security/system/ApplicationSessionFactory.java' did not match any file(s) known to git
715aacd7b3d8b6483e9ced3be5309691e8046ad6
1
CBIIT/common-security-module,NCIP/common-security-module
/* * Created on Dec 30, 2004 * * TODO To change the template for this generated file go to * Window - Preferences - Java - Code Style - Code Templates */ package gov.nih.nci.security.system; import java.util.*; import net.sf.hibernate.SessionFactory; import net.sf.hibernate.cfg.Configuration; import java.io.*; /** * @author kumarvi * * TODO To change the template for this generated type comment go to * Window - Preferences - Java - Code Style - Code Templates */ public class ApplicationSessionFactory { private static Hashtable appSessionFactories; /** * This method will read a system wide configuration file * called ApplicationSecurityConfig.xml and initilaize the * session factories as per the application context names */ static{ appSessionFactories = new Hashtable(); /** * Read all the applicationContext entries in * the file and iterate through them. * for(int i=0;i<numberOfEntries;i++){ * build session factory here * appSessionFactories.put(applicationContextName,sf); * } */ } public static SessionFactory getSessionFactory(String applicationContextName){ /** * return (SessionFactory)appSessionFactories.get(applicationContextName); */ return null; } }
api/src/gov/nih/nci/security/system/ApplicationSessionFactory.java
*** empty log message *** SVN-Revision: 265
api/src/gov/nih/nci/security/system/ApplicationSessionFactory.java
*** empty log message ***
<ide><path>pi/src/gov/nih/nci/security/system/ApplicationSessionFactory.java <add>/* <add> * Created on Dec 30, 2004 <add> * <add> * TODO To change the template for this generated file go to <add> * Window - Preferences - Java - Code Style - Code Templates <add> */ <add>package gov.nih.nci.security.system; <add> <add>import java.util.*; <add>import net.sf.hibernate.SessionFactory; <add>import net.sf.hibernate.cfg.Configuration; <add>import java.io.*; <add> <add>/** <add> * @author kumarvi <add> * <add> * TODO To change the template for this generated type comment go to <add> * Window - Preferences - Java - Code Style - Code Templates <add> */ <add>public class ApplicationSessionFactory { <add> <add> private static Hashtable appSessionFactories; <add> <add> /** <add> * This method will read a system wide configuration file <add> * called ApplicationSecurityConfig.xml and initilaize the <add> * session factories as per the application context names <add> */ <add> static{ <add> appSessionFactories = new Hashtable(); <add> /** <add> * Read all the applicationContext entries in <add> * the file and iterate through them. <add> * for(int i=0;i<numberOfEntries;i++){ <add> * build session factory here <add> * appSessionFactories.put(applicationContextName,sf); <add> * } <add> */ <add> } <add> public static SessionFactory getSessionFactory(String applicationContextName){ <add> /** <add> * return (SessionFactory)appSessionFactories.get(applicationContextName); <add> */ <add> return null; <add> } <add>}
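
A sketch of how the TODO blocks in the class above might be filled in, assuming the Hibernate 2-era net.sf.hibernate API that its imports suggest. The context names and hibernate.cfg.xml resource names below are placeholders standing in for whatever ApplicationSecurityConfig.xml actually lists, and the parsing of that file is left out.

package gov.nih.nci.security.system;

import java.util.Hashtable;

import net.sf.hibernate.HibernateException;
import net.sf.hibernate.SessionFactory;
import net.sf.hibernate.cfg.Configuration;

public class ApplicationSessionFactorySketch {

    private static final Hashtable appSessionFactories = new Hashtable();

    static {
        // Placeholder entries; the real class would read these from
        // ApplicationSecurityConfig.xml instead of hard-coding them.
        String[][] contexts = {
            { "applicationA", "applicationA.hibernate.cfg.xml" },
            { "applicationB", "applicationB.hibernate.cfg.xml" }
        };
        for (int i = 0; i < contexts.length; i++) {
            try {
                SessionFactory sf = new Configuration()
                        .configure(contexts[i][1])   // one Hibernate config per application
                        .buildSessionFactory();
                appSessionFactories.put(contexts[i][0], sf);
            } catch (HibernateException e) {
                throw new ExceptionInInitializerError(e);
            }
        }
    }

    public static SessionFactory getSessionFactory(String applicationContextName) {
        return (SessionFactory) appSessionFactories.get(applicationContextName);
    }
}
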
JavaScript
apache-2.0
6997288c6b1c2cdcc481070e501e30d705a51f32
0
lucidworks/lucidworks-view,AlexKolonitsky/lucidworks-view
(function() { 'use strict'; angular .module('fusionSeedApp.controllers.home', ['fusionSeedApp.services', 'angucomplete-alt']) .controller('HomeController', HomeController); function HomeController(ConfigService, URLService, Orwell, AuthService, _) { 'ngInject'; var hc = this; //eslint-disable-line var resultsObservable; var query; hc.searchQuery = '*:*'; activate(); //////////////// /** * Initializes a search from the URL object */ function activate() { hc.search = doSearch; hc.logout = logout; hc.appName = ConfigService.config.search_app_title; hc.logoLocation = ConfigService.config.logo_location; hc.status = 'loading'; hc.lastQuery = ''; query = URLService.getQueryFromUrl(); //Setting the query object... also populating the the view model hc.searchQuery = _.get(query,'q','*:*'); // Use an observable to get the contents of a queryResults after it is updated. resultsObservable = Orwell.getObservable('queryResults'); resultsObservable.addObserver(function(data) { if (data.hasOwnProperty('response')) { hc.numFound = data.response.numFound; hc.lastQuery = data.responseHeader.params.q; } else { hc.numFound = 0; } updateStatus(); }); URLService.setQuery(query); } function updateStatus(){ var status = ''; if(hc.numFound === 0){ status = 'no-results'; if(hc.lastQuery === ''){ status = 'get-started'; } } else { status = 'normal'; } hc.status = status; } /** * Initializes a new search. */ function doSearch() { query = { q: hc.searchQuery, start: 0, // TODO better solution for turning off fq on a new query fq: [] }; URLService.setQuery(query); } /** * Logs a user out of a session. */ function logout(){ AuthService.destroySession(); } } })();
client/assets/js/controllers/HomeController.js
(function() { 'use strict'; angular .module('fusionSeedApp.controllers.home', ['fusionSeedApp.services', 'angucomplete-alt']) .controller('HomeController', HomeController); function HomeController($scope, $log, ConfigService, QueryService, URLService, Orwell, AuthService, _, $timeout, $rootScope) { 'ngInject'; var hc = this; //eslint-disable-line var resultsObservable; var query; hc.searchQuery = '*:*'; activate(); //////////////// /** * Initializes a search from the URL object */ function activate() { hc.search = doSearch; hc.logout = logout; hc.appName = ConfigService.config.search_app_title; hc.logoLocation = ConfigService.config.logo_location; hc.status = 'loading'; hc.lastQuery = ''; query = URLService.getQueryFromUrl(); //Setting the query object... also populating the the view model hc.searchQuery = _.get(query,'q','*:*'); // Use an observable to get the contents of a queryResults after it is updated. resultsObservable = Orwell.getObservable('queryResults'); resultsObservable.addObserver(function(data) { if (data.hasOwnProperty('response')) { hc.numFound = data.response.numFound; hc.lastQuery = data.responseHeader.params.q; } else { hc.numFound = 0; } updateStatus(); }); URLService.setQuery(query); } function updateStatus(){ var status = ''; if(hc.numFound === 0){ status = 'no-results'; if(hc.lastQuery === ''){ status = 'get-started'; } } else { status = 'normal'; } hc.status = status; } /** * Initializes a new search. */ function doSearch() { query = { q: hc.searchQuery, start: 0, // TODO better solution for turning off fq on a new query fq: [] }; URLService.setQuery(query); } /** * Logs a user out of a session. */ function logout(){ AuthService.destroySession(); } } })();
remove excess dependencies from home controller
client/assets/js/controllers/HomeController.js
remove excess dependencies from home controller
<ide><path>lient/assets/js/controllers/HomeController.js <ide> .controller('HomeController', HomeController); <ide> <ide> <del> function HomeController($scope, $log, ConfigService, QueryService, URLService, Orwell, AuthService, _, $timeout, $rootScope) { <add> function HomeController(ConfigService, URLService, Orwell, AuthService, _) { <ide> <ide> 'ngInject'; <ide> var hc = this; //eslint-disable-line
Java
apache-2.0
244ebeb84a3e9a7e3e11ce59c537e399a8e2b9cf
0
kamir/jena,tr3vr/jena,CesarPantoja/jena,jianglili007/jena,apache/jena,atsolakid/jena,kidaa/jena,samaitra/jena,adrapereira/jena
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.jena.atlas.data ; import org.apache.jena.query.ARQ ; import org.apache.jena.sparql.util.Context ; public class ThresholdPolicyFactory { private static final long defaultThreshold = -1 ; // Use the never() policy by default private static final ThresholdPolicy<?> NEVER = new ThresholdPolicy<Object>() { @Override public void increment(Object item) { // Do nothing } @Override public boolean isThresholdExceeded() { return false ; } @Override public void reset() { // Do nothing } } ; /** * A threshold policy that is never exceeded. */ public static final <E> ThresholdPolicy<E> never() { @SuppressWarnings("unchecked") ThresholdPolicy<E> policy = (ThresholdPolicy<E>) NEVER ; return policy ; } /** * A threshold policy based on the number of tuples added. */ public static <E> ThresholdPolicy<E> count(long threshold) { return new ThresholdPolicyCount<>(threshold) ; } /** * A threshold policy based on the {@link org.apache.jena.query.ARQ#spillToDiskThreshold} symbol in the given Context. * If the symbol is not set, then the {@link #never()} policy is used by default. */ public static <E> ThresholdPolicy<E> policyFromContext(Context context) { long threshold = context.getLong(ARQ.spillToDiskThreshold, defaultThreshold) ; if ( threshold >= 0 ) { return count(threshold); } else { return never() ; } } }
jena-arq/src/main/java/org/apache/jena/atlas/data/ThresholdPolicyFactory.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.jena.atlas.data ; import org.apache.jena.query.ARQ ; import org.apache.jena.sparql.util.Context ; public class ThresholdPolicyFactory { private static final long defaultThreshold = -1 ; // Use the never() policy by default private static final ThresholdPolicy<?> NEVER = new ThresholdPolicy<Object>() { @Override public void increment(Object item) { // Do nothing } @Override public boolean isThresholdExceeded() { return false ; } @Override public void reset() { // Do nothing } } ; /** * A threshold policy that is never exceeded. */ public static final <E> ThresholdPolicy<E> never() { @SuppressWarnings("unchecked") ThresholdPolicy<E> policy = (ThresholdPolicy<E>) NEVER ; return policy ; } /** * A threshold policy based on the number of tuples added. */ public static <E> ThresholdPolicy<E> count(long threshold) { return new ThresholdPolicyCount<>(threshold) ; } /** * A threshold policy based on the {@link org.apache.jena.query.ARQ#spillToDiskThreshold} symbol in the given Context. * If the symbol is not set, then the {@link #never()} policy is used by default. */ public static <E> ThresholdPolicy<E> policyFromContext(Context context) { long threshold = (Long) context.get(ARQ.spillToDiskThreshold, defaultThreshold) ; if ( threshold >= 0 ) { return count(threshold); } else { return never() ; } } }
Use Context::getLong.
jena-arq/src/main/java/org/apache/jena/atlas/data/ThresholdPolicyFactory.java
Use Context::getLong.
<ide><path>ena-arq/src/main/java/org/apache/jena/atlas/data/ThresholdPolicyFactory.java <ide> */ <ide> public static <E> ThresholdPolicy<E> policyFromContext(Context context) <ide> { <del> long threshold = (Long) context.get(ARQ.spillToDiskThreshold, defaultThreshold) ; <add> long threshold = context.getLong(ARQ.spillToDiskThreshold, defaultThreshold) ; <ide> if ( threshold >= 0 ) <ide> { <ide> return count(threshold);
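
A small self-contained illustration of why the typed accessor is less fragile than the cast-and-unbox form it replaces. MiniContext below is a stand-in written for this example, not Jena's Context: when the stored value happens to be an Integer, casting the raw Object to Long fails, while a getLong-style accessor can normalise any Number.

import java.util.HashMap;
import java.util.Map;

// Stand-in for a symbol-to-value context; not Jena's Context class.
class MiniContext {
    private final Map<String, Object> values = new HashMap<String, Object>();

    void set(String key, Object value) { values.put(key, value); }

    Object get(String key, Object defaultValue) {
        Object v = values.get(key);
        return v == null ? defaultValue : v;
    }

    // Typed accessor: tolerates any stored Number, no cast at the call site.
    long getLong(String key, long defaultValue) {
        Object v = values.get(key);
        return v == null ? defaultValue : ((Number) v).longValue();
    }
}

public class GetLongDemo {
    public static void main(String[] args) {
        MiniContext ctx = new MiniContext();
        ctx.set("spillToDiskThreshold", 1000);  // stored as an Integer

        System.out.println(ctx.getLong("spillToDiskThreshold", -1));  // prints 1000

        try {
            long t = (Long) ctx.get("spillToDiskThreshold", Long.valueOf(-1));  // Integer cast to Long
            System.out.println(t);
        } catch (ClassCastException e) {
            System.out.println("cast-and-unbox failed: " + e.getMessage());
        }
    }
}
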
Java
apache-2.0
19ccd96fbea955410c83618abc611c8031a335bb
0
Rafiski/Shalon
src/clientes/Pesquisar.java
/* * To change this license header, choose License Headers in Project Properties. * To change this template file, choose Tools | Templates * and open the template in the editor. */ package clientes; /** * * @author carlos */ public class Pesquisar extends javax.swing.JFrame { /** * Creates new form Pesquisar */ public Pesquisar() { initComponents(); } /** * This method is called from within the constructor to initialize the form. * WARNING: Do NOT modify this code. The content of this method is always * regenerated by the Form Editor. */ @SuppressWarnings("unchecked") // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents private void initComponents() { jLabel1 = new javax.swing.JLabel(); jLabel2 = new javax.swing.JLabel(); opNome = new javax.swing.JRadioButton(); opEmail = new javax.swing.JRadioButton(); opId = new javax.swing.JRadioButton(); texto = new javax.swing.JTextField(); jScrollPane2 = new javax.swing.JScrollPane(); jTable1 = new javax.swing.JTable(); jLabel3 = new javax.swing.JLabel(); jButton1 = new javax.swing.JButton(); jLabel5 = new javax.swing.JLabel(); jButton4 = new javax.swing.JButton(); jButton5 = new javax.swing.JButton(); setDefaultCloseOperation(javax.swing.WindowConstants.DISPOSE_ON_CLOSE); setResizable(false); jLabel1.setFont(new java.awt.Font("Tahoma", 1, 12)); // NOI18N jLabel1.setText("Pesquisar Clientes"); jLabel2.setText("Pesquisar por:"); opNome.setText("Nome"); opNome.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { opNomeActionPerformed(evt); } }); opEmail.setText("Email"); opEmail.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { opEmailActionPerformed(evt); } }); opId.setText("ID"); opId.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { opIdActionPerformed(evt); } }); texto.setEditable(false); texto.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { textoActionPerformed(evt); } }); jTable1.setModel(new javax.swing.table.DefaultTableModel( new Object [][] { {null, null}, {null, null}, {null, null}, {null, null}, {null, null}, {null, null}, {null, null}, {null, null}, {null, null}, {null, null}, {null, null}, {null, null}, {null, null}, {null, null}, {null, null}, {null, null}, {null, null}, {null, null}, {null, null}, {null, null}, {null, null}, {null, null}, {null, null}, {null, null}, {null, null}, {null, null} }, new String [] { "ID", "Nome" } ) { Class[] types = new Class [] { java.lang.Integer.class, java.lang.String.class }; boolean[] canEdit = new boolean [] { false, false }; public Class getColumnClass(int columnIndex) { return types [columnIndex]; } public boolean isCellEditable(int rowIndex, int columnIndex) { return canEdit [columnIndex]; } }); jTable1.setMinimumSize(new java.awt.Dimension(30, 139)); jTable1.getTableHeader().setReorderingAllowed(false); jScrollPane2.setViewportView(jTable1); if (jTable1.getColumnModel().getColumnCount() > 0) { jTable1.getColumnModel().getColumn(0).setResizable(false); jTable1.getColumnModel().getColumn(0).setPreferredWidth(5); jTable1.getColumnModel().getColumn(1).setResizable(false); } jLabel3.setText("Resultados:"); jButton1.setIcon(new javax.swing.ImageIcon(getClass().getResource("/Images/select.png"))); // NOI18N jButton1.setMaximumSize(new java.awt.Dimension(90, 23)); jButton1.setMinimumSize(new 
java.awt.Dimension(90, 23)); jButton1.setToolTipText("Selecionar"); jButton1.setPreferredSize(new java.awt.Dimension(90, 23)); jLabel5.setIcon(new javax.swing.ImageIcon(getClass().getResource("/Images/logoShalonSmall2.png"))); // NOI18N jButton4.setIcon(new javax.swing.ImageIcon(getClass().getResource("/Images/Blocked.png"))); // NOI18N jButton4.setToolTipText("Cancelar"); jButton4.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { jButton4ActionPerformed(evt); } }); jButton5.setIcon(new javax.swing.ImageIcon(getClass().getResource("/Images/pesquisa.png"))); // NOI18N jButton5.setToolTipText("Pesquisar"); javax.swing.GroupLayout layout = new javax.swing.GroupLayout(getContentPane()); getContentPane().setLayout(layout); layout.setHorizontalGroup( layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(layout.createSequentialGroup() .addGap(10, 10, 10) .addComponent(jLabel5, javax.swing.GroupLayout.PREFERRED_SIZE, 56, javax.swing.GroupLayout.PREFERRED_SIZE) .addGap(18, 18, 18) .addComponent(jLabel1) .addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)) .addGroup(layout.createSequentialGroup() .addContainerGap() .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.CENTER) .addComponent(jLabel2) .addComponent(jLabel3)) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(layout.createSequentialGroup() .addComponent(jButton4, javax.swing.GroupLayout.PREFERRED_SIZE, 63, javax.swing.GroupLayout.PREFERRED_SIZE) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) .addComponent(jButton1, javax.swing.GroupLayout.PREFERRED_SIZE, 63, javax.swing.GroupLayout.PREFERRED_SIZE)) .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING, false) .addGroup(layout.createSequentialGroup() .addComponent(opId) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(opNome) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(opEmail)) .addComponent(texto) .addComponent(jButton5, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addComponent(jScrollPane2, javax.swing.GroupLayout.PREFERRED_SIZE, 0, Short.MAX_VALUE))) .addContainerGap(45, Short.MAX_VALUE)) ); layout.setVerticalGroup( layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(layout.createSequentialGroup() .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(layout.createSequentialGroup() .addContainerGap() .addComponent(jLabel5, javax.swing.GroupLayout.PREFERRED_SIZE, 61, javax.swing.GroupLayout.PREFERRED_SIZE)) .addGroup(layout.createSequentialGroup() .addGap(34, 34, 34) .addComponent(jLabel1))) .addGap(15, 15, 15) .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) .addComponent(jLabel2) .addComponent(opId) .addComponent(opNome) .addComponent(opEmail)) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(texto, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(jButton5, javax.swing.GroupLayout.PREFERRED_SIZE, 24, javax.swing.GroupLayout.PREFERRED_SIZE) 
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addComponent(jScrollPane2, javax.swing.GroupLayout.PREFERRED_SIZE, 171, javax.swing.GroupLayout.PREFERRED_SIZE) .addComponent(jLabel3)) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addComponent(jButton4, javax.swing.GroupLayout.PREFERRED_SIZE, 52, javax.swing.GroupLayout.PREFERRED_SIZE) .addComponent(jButton1, javax.swing.GroupLayout.PREFERRED_SIZE, 53, javax.swing.GroupLayout.PREFERRED_SIZE)) .addContainerGap(53, Short.MAX_VALUE)) ); pack(); }// </editor-fold>//GEN-END:initComponents private void opNomeActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_opNomeActionPerformed if(opNome.isSelected()){ opEmail.setSelected(false); opId.setSelected(false); texto.setEditable(true); } else{ texto.setEditable(false); } }//GEN-LAST:event_opNomeActionPerformed private void opIdActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_opIdActionPerformed if(opId.isSelected()){ opEmail.setSelected(false); opNome.setSelected(false); texto.setEditable(true); } else{ texto.setEditable(false); } }//GEN-LAST:event_opIdActionPerformed private void opEmailActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_opEmailActionPerformed if(opEmail.isSelected()){ opNome.setSelected(false); opId.setSelected(false); texto.setEditable(true); } else{ texto.setEditable(false); } }//GEN-LAST:event_opEmailActionPerformed private void textoActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_textoActionPerformed // TODO add your handling code here: }//GEN-LAST:event_textoActionPerformed private void jButton4ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButton4ActionPerformed this.dispose(); }//GEN-LAST:event_jButton4ActionPerformed /** * @param args the command line arguments */ public static void main(String args[]) { /* Set the Nimbus look and feel */ //<editor-fold defaultstate="collapsed" desc=" Look and feel setting code (optional) "> /* If Nimbus (introduced in Java SE 6) is not available, stay with the default look and feel. 
* For details see http://download.oracle.com/javase/tutorial/uiswing/lookandfeel/plaf.html */ try { for (javax.swing.UIManager.LookAndFeelInfo info : javax.swing.UIManager.getInstalledLookAndFeels()) { if ("Nimbus".equals(info.getName())) { javax.swing.UIManager.setLookAndFeel(info.getClassName()); break; } } } catch (ClassNotFoundException ex) { java.util.logging.Logger.getLogger(Pesquisar.class.getName()).log(java.util.logging.Level.SEVERE, null, ex); } catch (InstantiationException ex) { java.util.logging.Logger.getLogger(Pesquisar.class.getName()).log(java.util.logging.Level.SEVERE, null, ex); } catch (IllegalAccessException ex) { java.util.logging.Logger.getLogger(Pesquisar.class.getName()).log(java.util.logging.Level.SEVERE, null, ex); } catch (javax.swing.UnsupportedLookAndFeelException ex) { java.util.logging.Logger.getLogger(Pesquisar.class.getName()).log(java.util.logging.Level.SEVERE, null, ex); } //</editor-fold> /* Create and display the form */ java.awt.EventQueue.invokeLater(new Runnable() { public void run() { new Pesquisar().setVisible(true); } }); } // Variables declaration - do not modify//GEN-BEGIN:variables private javax.swing.JButton jButton1; private javax.swing.JButton jButton4; private javax.swing.JButton jButton5; private javax.swing.JLabel jLabel1; private javax.swing.JLabel jLabel2; private javax.swing.JLabel jLabel3; private javax.swing.JLabel jLabel5; private javax.swing.JScrollPane jScrollPane2; private javax.swing.JTable jTable1; private javax.swing.JRadioButton opEmail; private javax.swing.JRadioButton opId; private javax.swing.JRadioButton opNome; private javax.swing.JTextField texto; // End of variables declaration//GEN-END:variables }
Delete Pesquisar.java
src/clientes/Pesquisar.java
Delete Pesquisar.java
<ide><path>rc/clientes/Pesquisar.java <del>/* <del> * To change this license header, choose License Headers in Project Properties. <del> * To change this template file, choose Tools | Templates <del> * and open the template in the editor. <del> */ <del> <del>package clientes; <del> <del>/** <del> * <del> * @author carlos <del> */ <del>public class Pesquisar extends javax.swing.JFrame { <del> <del> /** <del> * Creates new form Pesquisar <del> */ <del> public Pesquisar() { <del> initComponents(); <del> } <del> <del> /** <del> * This method is called from within the constructor to initialize the form. <del> * WARNING: Do NOT modify this code. The content of this method is always <del> * regenerated by the Form Editor. <del> */ <del> @SuppressWarnings("unchecked") <del> // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents <del> private void initComponents() { <del> <del> jLabel1 = new javax.swing.JLabel(); <del> jLabel2 = new javax.swing.JLabel(); <del> opNome = new javax.swing.JRadioButton(); <del> opEmail = new javax.swing.JRadioButton(); <del> opId = new javax.swing.JRadioButton(); <del> texto = new javax.swing.JTextField(); <del> jScrollPane2 = new javax.swing.JScrollPane(); <del> jTable1 = new javax.swing.JTable(); <del> jLabel3 = new javax.swing.JLabel(); <del> jButton1 = new javax.swing.JButton(); <del> jLabel5 = new javax.swing.JLabel(); <del> jButton4 = new javax.swing.JButton(); <del> jButton5 = new javax.swing.JButton(); <del> <del> setDefaultCloseOperation(javax.swing.WindowConstants.DISPOSE_ON_CLOSE); <del> setResizable(false); <del> <del> jLabel1.setFont(new java.awt.Font("Tahoma", 1, 12)); // NOI18N <del> jLabel1.setText("Pesquisar Clientes"); <del> <del> jLabel2.setText("Pesquisar por:"); <del> <del> opNome.setText("Nome"); <del> opNome.addActionListener(new java.awt.event.ActionListener() { <del> public void actionPerformed(java.awt.event.ActionEvent evt) { <del> opNomeActionPerformed(evt); <del> } <del> }); <del> <del> opEmail.setText("Email"); <del> opEmail.addActionListener(new java.awt.event.ActionListener() { <del> public void actionPerformed(java.awt.event.ActionEvent evt) { <del> opEmailActionPerformed(evt); <del> } <del> }); <del> <del> opId.setText("ID"); <del> opId.addActionListener(new java.awt.event.ActionListener() { <del> public void actionPerformed(java.awt.event.ActionEvent evt) { <del> opIdActionPerformed(evt); <del> } <del> }); <del> <del> texto.setEditable(false); <del> texto.addActionListener(new java.awt.event.ActionListener() { <del> public void actionPerformed(java.awt.event.ActionEvent evt) { <del> textoActionPerformed(evt); <del> } <del> }); <del> <del> jTable1.setModel(new javax.swing.table.DefaultTableModel( <del> new Object [][] { <del> {null, null}, <del> {null, null}, <del> {null, null}, <del> {null, null}, <del> {null, null}, <del> {null, null}, <del> {null, null}, <del> {null, null}, <del> {null, null}, <del> {null, null}, <del> {null, null}, <del> {null, null}, <del> {null, null}, <del> {null, null}, <del> {null, null}, <del> {null, null}, <del> {null, null}, <del> {null, null}, <del> {null, null}, <del> {null, null}, <del> {null, null}, <del> {null, null}, <del> {null, null}, <del> {null, null}, <del> {null, null}, <del> {null, null} <del> }, <del> new String [] { <del> "ID", "Nome" <del> } <del> ) { <del> Class[] types = new Class [] { <del> java.lang.Integer.class, java.lang.String.class <del> }; <del> boolean[] canEdit = new boolean [] { <del> false, false <del> }; <del> <del> public Class 
getColumnClass(int columnIndex) { <del> return types [columnIndex]; <del> } <del> <del> public boolean isCellEditable(int rowIndex, int columnIndex) { <del> return canEdit [columnIndex]; <del> } <del> }); <del> jTable1.setMinimumSize(new java.awt.Dimension(30, 139)); <del> jTable1.getTableHeader().setReorderingAllowed(false); <del> jScrollPane2.setViewportView(jTable1); <del> if (jTable1.getColumnModel().getColumnCount() > 0) { <del> jTable1.getColumnModel().getColumn(0).setResizable(false); <del> jTable1.getColumnModel().getColumn(0).setPreferredWidth(5); <del> jTable1.getColumnModel().getColumn(1).setResizable(false); <del> } <del> <del> jLabel3.setText("Resultados:"); <del> <del> jButton1.setIcon(new javax.swing.ImageIcon(getClass().getResource("/Images/select.png"))); // NOI18N <del> jButton1.setMaximumSize(new java.awt.Dimension(90, 23)); <del> jButton1.setMinimumSize(new java.awt.Dimension(90, 23)); <del> jButton1.setToolTipText("Selecionar"); <del> jButton1.setPreferredSize(new java.awt.Dimension(90, 23)); <del> <del> jLabel5.setIcon(new javax.swing.ImageIcon(getClass().getResource("/Images/logoShalonSmall2.png"))); // NOI18N <del> <del> jButton4.setIcon(new javax.swing.ImageIcon(getClass().getResource("/Images/Blocked.png"))); // NOI18N <del> jButton4.setToolTipText("Cancelar"); <del> jButton4.addActionListener(new java.awt.event.ActionListener() { <del> public void actionPerformed(java.awt.event.ActionEvent evt) { <del> jButton4ActionPerformed(evt); <del> } <del> }); <del> <del> jButton5.setIcon(new javax.swing.ImageIcon(getClass().getResource("/Images/pesquisa.png"))); // NOI18N <del> jButton5.setToolTipText("Pesquisar"); <del> <del> javax.swing.GroupLayout layout = new javax.swing.GroupLayout(getContentPane()); <del> getContentPane().setLayout(layout); <del> layout.setHorizontalGroup( <del> layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) <del> .addGroup(layout.createSequentialGroup() <del> .addGap(10, 10, 10) <del> .addComponent(jLabel5, javax.swing.GroupLayout.PREFERRED_SIZE, 56, javax.swing.GroupLayout.PREFERRED_SIZE) <del> .addGap(18, 18, 18) <del> .addComponent(jLabel1) <del> .addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)) <del> .addGroup(layout.createSequentialGroup() <del> .addContainerGap() <del> .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.CENTER) <del> .addComponent(jLabel2) <del> .addComponent(jLabel3)) <del> .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) <del> .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) <del> .addGroup(layout.createSequentialGroup() <del> .addComponent(jButton4, javax.swing.GroupLayout.PREFERRED_SIZE, 63, javax.swing.GroupLayout.PREFERRED_SIZE) <del> .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) <del> .addComponent(jButton1, javax.swing.GroupLayout.PREFERRED_SIZE, 63, javax.swing.GroupLayout.PREFERRED_SIZE)) <del> .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING, false) <del> .addGroup(layout.createSequentialGroup() <del> .addComponent(opId) <del> .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) <del> .addComponent(opNome) <del> .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) <del> .addComponent(opEmail)) <del> .addComponent(texto) <del> .addComponent(jButton5, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) <del> 
.addComponent(jScrollPane2, javax.swing.GroupLayout.PREFERRED_SIZE, 0, Short.MAX_VALUE))) <del> .addContainerGap(45, Short.MAX_VALUE)) <del> ); <del> layout.setVerticalGroup( <del> layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) <del> .addGroup(layout.createSequentialGroup() <del> .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) <del> .addGroup(layout.createSequentialGroup() <del> .addContainerGap() <del> .addComponent(jLabel5, javax.swing.GroupLayout.PREFERRED_SIZE, 61, javax.swing.GroupLayout.PREFERRED_SIZE)) <del> .addGroup(layout.createSequentialGroup() <del> .addGap(34, 34, 34) <del> .addComponent(jLabel1))) <del> .addGap(15, 15, 15) <del> .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) <del> .addComponent(jLabel2) <del> .addComponent(opId) <del> .addComponent(opNome) <del> .addComponent(opEmail)) <del> .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) <del> .addComponent(texto, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) <del> .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) <del> .addComponent(jButton5, javax.swing.GroupLayout.PREFERRED_SIZE, 24, javax.swing.GroupLayout.PREFERRED_SIZE) <del> .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) <del> .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) <del> .addComponent(jScrollPane2, javax.swing.GroupLayout.PREFERRED_SIZE, 171, javax.swing.GroupLayout.PREFERRED_SIZE) <del> .addComponent(jLabel3)) <del> .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) <del> .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) <del> .addComponent(jButton4, javax.swing.GroupLayout.PREFERRED_SIZE, 52, javax.swing.GroupLayout.PREFERRED_SIZE) <del> .addComponent(jButton1, javax.swing.GroupLayout.PREFERRED_SIZE, 53, javax.swing.GroupLayout.PREFERRED_SIZE)) <del> .addContainerGap(53, Short.MAX_VALUE)) <del> ); <del> <del> pack(); <del> }// </editor-fold>//GEN-END:initComponents <del> <del> private void opNomeActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_opNomeActionPerformed <del> if(opNome.isSelected()){ <del> opEmail.setSelected(false); <del> opId.setSelected(false); <del> texto.setEditable(true); <del> } <del> else{ <del> texto.setEditable(false); <del> } <del> }//GEN-LAST:event_opNomeActionPerformed <del> <del> private void opIdActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_opIdActionPerformed <del> if(opId.isSelected()){ <del> opEmail.setSelected(false); <del> opNome.setSelected(false); <del> texto.setEditable(true); <del> } <del> else{ <del> texto.setEditable(false); <del> } <del> }//GEN-LAST:event_opIdActionPerformed <del> <del> private void opEmailActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_opEmailActionPerformed <del> if(opEmail.isSelected()){ <del> opNome.setSelected(false); <del> opId.setSelected(false); <del> texto.setEditable(true); <del> } <del> else{ <del> texto.setEditable(false); <del> } <del> }//GEN-LAST:event_opEmailActionPerformed <del> <del> private void textoActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_textoActionPerformed <del> // TODO add your handling code here: <del> }//GEN-LAST:event_textoActionPerformed <del> <del> private void jButton4ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButton4ActionPerformed <del> this.dispose(); <del> 
}//GEN-LAST:event_jButton4ActionPerformed <del> <del> /** <del> * @param args the command line arguments <del> */ <del> public static void main(String args[]) { <del> /* Set the Nimbus look and feel */ <del> //<editor-fold defaultstate="collapsed" desc=" Look and feel setting code (optional) "> <del> /* If Nimbus (introduced in Java SE 6) is not available, stay with the default look and feel. <del> * For details see http://download.oracle.com/javase/tutorial/uiswing/lookandfeel/plaf.html <del> */ <del> try { <del> for (javax.swing.UIManager.LookAndFeelInfo info : javax.swing.UIManager.getInstalledLookAndFeels()) { <del> if ("Nimbus".equals(info.getName())) { <del> javax.swing.UIManager.setLookAndFeel(info.getClassName()); <del> break; <del> } <del> } <del> } catch (ClassNotFoundException ex) { <del> java.util.logging.Logger.getLogger(Pesquisar.class.getName()).log(java.util.logging.Level.SEVERE, null, ex); <del> } catch (InstantiationException ex) { <del> java.util.logging.Logger.getLogger(Pesquisar.class.getName()).log(java.util.logging.Level.SEVERE, null, ex); <del> } catch (IllegalAccessException ex) { <del> java.util.logging.Logger.getLogger(Pesquisar.class.getName()).log(java.util.logging.Level.SEVERE, null, ex); <del> } catch (javax.swing.UnsupportedLookAndFeelException ex) { <del> java.util.logging.Logger.getLogger(Pesquisar.class.getName()).log(java.util.logging.Level.SEVERE, null, ex); <del> } <del> //</editor-fold> <del> <del> /* Create and display the form */ <del> java.awt.EventQueue.invokeLater(new Runnable() { <del> public void run() { <del> new Pesquisar().setVisible(true); <del> } <del> }); <del> } <del> <del> // Variables declaration - do not modify//GEN-BEGIN:variables <del> private javax.swing.JButton jButton1; <del> private javax.swing.JButton jButton4; <del> private javax.swing.JButton jButton5; <del> private javax.swing.JLabel jLabel1; <del> private javax.swing.JLabel jLabel2; <del> private javax.swing.JLabel jLabel3; <del> private javax.swing.JLabel jLabel5; <del> private javax.swing.JScrollPane jScrollPane2; <del> private javax.swing.JTable jTable1; <del> private javax.swing.JRadioButton opEmail; <del> private javax.swing.JRadioButton opId; <del> private javax.swing.JRadioButton opNome; <del> private javax.swing.JTextField texto; <del> // End of variables declaration//GEN-END:variables <del>}
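
The deleted form keeps its three JRadioButtons (ID, Nome, Email) mutually exclusive by hand, with each ActionListener deselecting the other two. Should the screen be rebuilt, a sketch along these lines, using Swing's standard ButtonGroup, gives the same one-of-three behaviour without the listener bookkeeping; everything below is illustrative and not taken from the project.

import javax.swing.ButtonGroup;
import javax.swing.JFrame;
import javax.swing.JPanel;
import javax.swing.JRadioButton;
import javax.swing.JTextField;
import javax.swing.SwingUtilities;

public class PesquisarOptionsSketch {
    public static void main(String[] args) {
        SwingUtilities.invokeLater(new Runnable() {
            public void run() {
                JRadioButton byId = new JRadioButton("ID");
                JRadioButton byName = new JRadioButton("Nome");
                JRadioButton byEmail = new JRadioButton("Email");

                // ButtonGroup enforces that at most one of the three is selected.
                ButtonGroup group = new ButtonGroup();
                group.add(byId);
                group.add(byName);
                group.add(byEmail);

                JTextField query = new JTextField(20);

                JPanel panel = new JPanel();
                panel.add(byId);
                panel.add(byName);
                panel.add(byEmail);
                panel.add(query);

                JFrame frame = new JFrame("Pesquisar Clientes (sketch)");
                frame.setDefaultCloseOperation(JFrame.DISPOSE_ON_CLOSE);
                frame.add(panel);
                frame.pack();
                frame.setVisible(true);
            }
        });
    }
}
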
Java
agpl-3.0
003c89c3a4fa9f97c381d28914edc101dad71eb0
0
virustotalop/mcMMO,jhonMalcom79/mcMMO_pers,Maximvdw/mcMMO,isokissa3/mcMMO,EvilOlaf/mcMMO
package com.gmail.nossr50.skills.repair; import org.bukkit.GameMode; import org.bukkit.Location; import org.bukkit.Material; import org.bukkit.entity.Player; import org.bukkit.inventory.ItemStack; import org.getspout.spoutapi.SpoutManager; import org.getspout.spoutapi.player.SpoutPlayer; import com.gmail.nossr50.mcMMO; import com.gmail.nossr50.config.Config; import com.gmail.nossr50.datatypes.PlayerProfile; import com.gmail.nossr50.datatypes.SkillType; import com.gmail.nossr50.locale.LocaleLoader; import com.gmail.nossr50.util.ItemChecks; import com.gmail.nossr50.util.Misc; import com.gmail.nossr50.util.Permissions; import com.gmail.nossr50.util.Users; public class Salvage { private static Config configInstance = Config.getInstance(); public static void handleSalvage(final Player player, final Location location, final ItemStack inHand) { if (!Permissions.salvage(player) || !configInstance.getSalvageEnabled()) { return; } if(player.getGameMode() == GameMode.SURVIVAL) { final PlayerProfile profile = Users.getProfile(player); final int skillLevel = profile.getSkillLevel(SkillType.REPAIR); final int unlockLevel = configInstance.getSalvageUnlockLevel(); if (skillLevel >= unlockLevel) { final float currentdura = inHand.getDurability(); if (currentdura == 0) { final int salvagedAmount = getSalvagedAmount(inHand); final int itemID = getSalvagedItemID(inHand); player.setItemInHand(new ItemStack(Material.AIR)); location.setY(location.getY() + 1); Misc.dropItem(location, new ItemStack(itemID, salvagedAmount)); player.sendMessage(LocaleLoader.getString("Repair.Skills.SalvageSuccess")); } else { player.sendMessage(LocaleLoader.getString("Repair.Skills.NotFullDurability")); } } else { player.sendMessage(LocaleLoader.getString("Repair.Skills.AdeptSalvage")); } } } /** * Handles notifications for placing an anvil. 
* * @param player The player placing the anvil * @param anvilID The item ID of the anvil block */ public static void placedAnvilCheck(final Player player, final int anvilID) { final PlayerProfile profile = Users.getProfile(player); if (!profile.getPlacedSalvageAnvil()) { if (mcMMO.spoutEnabled) { final SpoutPlayer spoutPlayer = SpoutManager.getPlayer(player); if (spoutPlayer.isSpoutCraftEnabled()) { spoutPlayer.sendNotification("[mcMMO] Anvil Placed", "Right click to salvage!", Material.getMaterial(anvilID)); } } else { player.sendMessage(LocaleLoader.getString("Repair.Listener.Anvil2")); } profile.togglePlacedSalvageAnvil(); } } public static int getSalvagedItemID(final ItemStack inHand) { int salvagedItem = 0; if (ItemChecks.isDiamondTool(inHand) || ItemChecks.isDiamondArmor(inHand)) salvagedItem = 264; else if (ItemChecks.isGoldTool(inHand) || ItemChecks.isGoldArmor(inHand)) salvagedItem = 266; else if (ItemChecks.isIronTool(inHand) || ItemChecks.isIronArmor(inHand)) salvagedItem = 265; else if (ItemChecks.isStoneTool(inHand)) salvagedItem = 4; else if (ItemChecks.isWoodTool(inHand)) salvagedItem = 5; else if ( ItemChecks.isLeatherArmor(inHand)) salvagedItem = 334; return salvagedItem; } public static int getSalvagedAmount(final ItemStack inHand) { int salvagedAmount = 0; if (ItemChecks.isPickaxe(inHand) || ItemChecks.isAxe(inHand)) salvagedAmount = 3; else if (ItemChecks.isShovel(inHand)) salvagedAmount = 1; else if (ItemChecks.isSword(inHand) || ItemChecks.isHoe(inHand)) salvagedAmount = 2; else if (ItemChecks.isHelmet(inHand)) salvagedAmount = 5; else if (ItemChecks.isChestplate(inHand)) salvagedAmount = 8; else if (ItemChecks.isPants(inHand)) salvagedAmount = 7; else if (ItemChecks.isBoots(inHand)) salvagedAmount = 4; return salvagedAmount; } /** * Checks if the item is salvageable. * * @param is Item to check * @return true if the item is salvageable, false otherwise */ public static boolean isSalvageable(final ItemStack is) { if (configInstance.getSalvageTools() && ItemChecks.isTool(is)) { return true; } if (configInstance.getSalvageArmor() && ItemChecks.isArmor(is)) { return true; } return false; } }
src/main/java/com/gmail/nossr50/skills/repair/Salvage.java
package com.gmail.nossr50.skills.repair; import org.bukkit.GameMode; import org.bukkit.Location; import org.bukkit.Material; import org.bukkit.entity.Player; import org.bukkit.inventory.ItemStack; import org.getspout.spoutapi.SpoutManager; import org.getspout.spoutapi.player.SpoutPlayer; import com.gmail.nossr50.mcMMO; import com.gmail.nossr50.config.Config; import com.gmail.nossr50.datatypes.PlayerProfile; import com.gmail.nossr50.datatypes.SkillType; import com.gmail.nossr50.locale.LocaleLoader; import com.gmail.nossr50.util.ItemChecks; import com.gmail.nossr50.util.Misc; import com.gmail.nossr50.util.Permissions; import com.gmail.nossr50.util.Users; public class Salvage { private static Config configInstance = Config.getInstance(); public static void handleSalvage(final Player player, final Location location, final ItemStack inHand) { if (!Permissions.salvage(player) || !configInstance.getSalvageEnabled()) { return; } if(player.getGameMode() == GameMode.SURVIVAL) { final PlayerProfile profile = Users.getProfile(player); final int skillLevel = profile.getSkillLevel(SkillType.REPAIR); final int unlockLevel = configInstance.getSalvageUnlockLevel(); if (skillLevel >= unlockLevel) { final float currentdura = inHand.getDurability(); if (currentdura == 0) { final int salvagedAmount = getSalvagedAmount(inHand); final int itemID = getSalvagedItemID(inHand); player.setItemInHand(new ItemStack(0)); location.setY(location.getY() + 1); Misc.dropItem(location, new ItemStack(itemID, salvagedAmount)); player.sendMessage(LocaleLoader.getString("Repair.Skills.SalvageSuccess")); } else { player.sendMessage(LocaleLoader.getString("Repair.Skills.NotFullDurability")); } } else { player.sendMessage(LocaleLoader.getString("Repair.Skills.AdeptSalvage")); } } } /** * Handles notifications for placing an anvil. 
* * @param player The player placing the anvil * @param anvilID The item ID of the anvil block */ public static void placedAnvilCheck(final Player player, final int anvilID) { final PlayerProfile profile = Users.getProfile(player); if (!profile.getPlacedSalvageAnvil()) { if (mcMMO.spoutEnabled) { final SpoutPlayer spoutPlayer = SpoutManager.getPlayer(player); if (spoutPlayer.isSpoutCraftEnabled()) { spoutPlayer.sendNotification("[mcMMO] Anvil Placed", "Right click to salvage!", Material.getMaterial(anvilID)); } } else { player.sendMessage(LocaleLoader.getString("Repair.Listener.Anvil2")); } profile.togglePlacedSalvageAnvil(); } } public static int getSalvagedItemID(final ItemStack inHand) { int salvagedItem = 0; if (ItemChecks.isDiamondTool(inHand) || ItemChecks.isDiamondArmor(inHand)) salvagedItem = 264; else if (ItemChecks.isGoldTool(inHand) || ItemChecks.isGoldArmor(inHand)) salvagedItem = 266; else if (ItemChecks.isIronTool(inHand) || ItemChecks.isIronArmor(inHand)) salvagedItem = 265; else if (ItemChecks.isStoneTool(inHand)) salvagedItem = 4; else if (ItemChecks.isWoodTool(inHand)) salvagedItem = 5; else if ( ItemChecks.isLeatherArmor(inHand)) salvagedItem = 334; return salvagedItem; } public static int getSalvagedAmount(final ItemStack inHand) { int salvagedAmount = 0; if (ItemChecks.isPickaxe(inHand) || ItemChecks.isAxe(inHand)) salvagedAmount = 3; else if (ItemChecks.isShovel(inHand)) salvagedAmount = 1; else if (ItemChecks.isSword(inHand) || ItemChecks.isHoe(inHand)) salvagedAmount = 2; else if (ItemChecks.isHelmet(inHand)) salvagedAmount = 5; else if (ItemChecks.isChestplate(inHand)) salvagedAmount = 8; else if (ItemChecks.isPants(inHand)) salvagedAmount = 7; else if (ItemChecks.isBoots(inHand)) salvagedAmount = 4; return salvagedAmount; } /** * Checks if the item is salvageable. * * @param is Item to check * @return true if the item is salvageable, false otherwise */ public static boolean isSalvageable(final ItemStack is) { if (configInstance.getSalvageTools() && ItemChecks.isTool(is)) { return true; } if (configInstance.getSalvageArmor() && ItemChecks.isArmor(is)) { return true; } return false; } }
Missed one.
src/main/java/com/gmail/nossr50/skills/repair/Salvage.java
Missed one.
<ide><path>rc/main/java/com/gmail/nossr50/skills/repair/Salvage.java <ide> final int salvagedAmount = getSalvagedAmount(inHand); <ide> final int itemID = getSalvagedItemID(inHand); <ide> <del> player.setItemInHand(new ItemStack(0)); <add> player.setItemInHand(new ItemStack(Material.AIR)); <ide> location.setY(location.getY() + 1); <ide> Misc.dropItem(location, new ItemStack(itemID, salvagedAmount)); <ide> player.sendMessage(LocaleLoader.getString("Repair.Skills.SalvageSuccess"));
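
The one-line fix above swaps the raw numeric-ID constructor for the Material enum when emptying the player's hand. A short sketch of that pattern, assuming the same Bukkit-era API the plugin already uses (Player.setItemInHand and the ItemStack constructors visible in the diff); the helper class and method names are made up for illustration.

import org.bukkit.Material;
import org.bukkit.entity.Player;
import org.bukkit.inventory.ItemStack;

// Illustrative helper, not part of mcMMO.
public final class HandUtil {

    private HandUtil() {}

    // Empties the hand by Material instead of the magic number 0.
    public static void clearHeldItem(Player player) {
        player.setItemInHand(new ItemStack(Material.AIR));
    }
}
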
Java
apache-2.0
error: pathspec 'Cipher/Rot13.java' did not match any file(s) known to git
2ca3309f0c4749ff15383eaf7de45741fe94cfdc
1
caspar/Java
import java.io.*; import java.util.*; public class Rot13{ //doesn't work, but I'm tired - Caspar public static void main(String[] args){ try{ Rot13 R = new Rot13(args[0]); }catch(Exception e){ System.out.println("Something's not right"); } } public Rot13(String input){ input = Decapitalize(input); String output = ""; for (int i = 0; i < input.length(); i++){ if (input.charAt(i) + 13 > 122){ output += (char) ('a' + (13 - (122-input.charAt(i)))); } else { if (input.charAt(i) > 122 || input.charAt(i) < 97) output += (char) (input.charAt(i)); else output += (char) (input.charAt(i) + 13); } } System.out.println(output); } private String Decapitalize(String input){ String output = ""; for (int i = 0; i < input.length(); i++){ if (input.charAt(i) >= 65 && input.charAt(i) <= 90) output += (char) (input.charAt(i) + 32); else output += input.charAt(i); } return output; } }
Cipher/Rot13.java
Cipher - Caspar
Cipher/Rot13.java
Cipher - Caspar
<ide><path>ipher/Rot13.java <add>import java.io.*; <add>import java.util.*; <add> <add>public class Rot13{ <add> <add> //doesn't work, but I'm tired - Caspar <add> <add> public static void main(String[] args){ <add> <add> try{ <add> Rot13 R = new Rot13(args[0]); <add> }catch(Exception e){ <add> System.out.println("Something's not right"); <add> } <add> } <add> <add> public Rot13(String input){ <add> input = Decapitalize(input); <add> String output = ""; <add> <add> for (int i = 0; i < input.length(); i++){ <add> if (input.charAt(i) + 13 > 122){ <add> output += (char) ('a' + (13 - (122-input.charAt(i)))); <add> } <add> else { <add> if (input.charAt(i) > 122 || input.charAt(i) < 97) <add> output += (char) (input.charAt(i)); <add> else <add> output += (char) (input.charAt(i) + 13); <add> } <add> } <add> System.out.println(output); <add> } <add> private String Decapitalize(String input){ <add> String output = ""; <add> for (int i = 0; i < input.length(); i++){ <add> if (input.charAt(i) >= 65 && input.charAt(i) <= 90) <add> output += (char) (input.charAt(i) + 32); <add> else <add> output += input.charAt(i); <add> } <add> return output; <add> } <add> <add>}
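
The committed Rot13 itself notes that it "doesn't work": it lower-cases everything first, and its wrap-around branch is off by one, so 'n' (which should map to 'a') comes out as 'b' and 'z' comes out as 'n' instead of 'm'. A corrected sketch using modular arithmetic, written here for illustration rather than taken from the repository; applying it twice returns the original string.

public class Rot13Fixed {

    public static String rot13(String input) {
        StringBuilder out = new StringBuilder(input.length());
        for (int i = 0; i < input.length(); i++) {
            char c = input.charAt(i);
            if (c >= 'a' && c <= 'z') {
                out.append((char) ('a' + (c - 'a' + 13) % 26));  // rotate within a-z
            } else if (c >= 'A' && c <= 'Z') {
                out.append((char) ('A' + (c - 'A' + 13) % 26));  // keep upper case intact
            } else {
                out.append(c);  // digits, spaces and punctuation pass through
            }
        }
        return out.toString();
    }

    public static void main(String[] args) {
        System.out.println(rot13("Hello, World!"));          // Uryyb, Jbeyq!
        System.out.println(rot13(rot13("Hello, World!")));   // round-trips to the input
    }
}
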
JavaScript
mit
21a5baa8d946f8efe17cb34a26d2db6f8e721aac
0
MadballNeek/hitagi.js,RoganMurley/hitagi.js,ThiagoFerreir4/hitagi.js,shaunstanislaus/hitagi.js,zarnold/hitagi.js,Iced-Tea/hitagi.js
(function () { "use strict"; // Setup dimensions. var levelWidth = 600; var levelHeight = 400; // Setup pixi. var stage = new PIXI.Stage(0x141c22); var renderer = PIXI.autoDetectRenderer(levelWidth, levelHeight); document.body.appendChild(renderer.view); // Setup world. var world = new hitagi.World(); // Setup controls. var controls = new hitagi.Controls(); controls.bind(38, 'up'); controls.bind(40, 'down'); // Register systems. var renderSystem = new hitagi.systems.PixiRenderSystem(stage); world.register(renderSystem); world.register(new hitagi.systems.VelocitySystem()); var PlayerPaddleSystem = function () { var that = this; var verticalBounce = function (entity) { entity.c.velocity.yspeed *= -1.4; }; that.update = function (entity, dt) { if (entity.has('player') && entity.has('paddle')) { // Handle player input. if (controls.check('up')) { entity.c.velocity.yspeed -= entity.c.paddle.speed; } if (controls.check('down')) { entity.c.velocity.yspeed += entity.c.paddle.speed; } // Add friction to paddle. entity.c.velocity.yspeed *= entity.c.paddle.friction; // Stop paddle from leaving screen. if (entity.c.position.y < 0) { verticalBounce(entity); } if (entity.c.position.y + entity.c.paddle.height > levelHeight) { verticalBounce(entity); } } }; }; world.register(new PlayerPaddleSystem()); // Add entities. world.add( new hitagi.Entity() .attach(new hitagi.components.Position({ x: 8, y: 0 })) .attach(new hitagi.components.Velocity({ xspeed: 0, yspeed: 0 })) .attach(new hitagi.components.Rectangle({ x1: 0, y1: 0, x2: 16, y2: 128 })) .attach({'id': 'player'}) .attach({ 'id': 'paddle', 'friction': 0.9, 'height': 128, 'speed': 1, 'width': 16 }) ); // Setup game loop. requestAnimationFrame(animate); function animate() { // Update the world, using a fixed delta time. world.tick(1000); // Render the world. renderer.render(stage); // Next frame. requestAnimationFrame(animate); } } ());
examples/example2/example2.js
(function () { "use strict"; // Setup pixi. var stage = new PIXI.Stage(0x141c22); var renderer = PIXI.autoDetectRenderer(600, 400); document.body.appendChild(renderer.view); // Setup world. var world = new hitagi.World(); // Setup controls. var controls = new hitagi.Controls(); controls.bind(38, 'up'); controls.bind(40, 'down'); // Register systems. var renderSystem = new hitagi.systems.PixiRenderSystem(stage); world.register(renderSystem); world.register(new hitagi.systems.VelocitySystem()); var PaddleSystem = function () { var that = this; that.update = function (entity, dt) { if (entity.has('paddle')) { if (controls.check('up')) { entity.c.velocity.yspeed -= entity.c.paddle.speed; } if (controls.check('down')) { entity.c.velocity.yspeed += entity.c.paddle.speed; } entity.c.velocity.yspeed *= entity.c.paddle.friction; } }; }; world.register(new PaddleSystem()); // Add entities. world.add( new hitagi.Entity() .attach(new hitagi.components.Position({ x: 8, y: 0 })) .attach(new hitagi.components.Velocity({ xspeed: 0, yspeed: 0 })) .attach(new hitagi.components.Rectangle({ x1: 0, y1: 0, x2: 16, y2: 128 })) .attach({ 'id': 'paddle', 'speed': 1, 'friction': 0.9 }) ); // Setup game loop. requestAnimationFrame(animate); function animate() { // Update the world, using a fixed delta time. world.tick(1000); // Render the world. renderer.render(stage); // Next frame. requestAnimationFrame(animate); } } ());
Added bouncing to Player paddle on edges of screen
examples/example2/example2.js
Added bouncing to Player paddle on edges of screen
<ide><path>xamples/example2/example2.js <ide> (function () { <ide> "use strict"; <ide> <add> // Setup dimensions. <add> var levelWidth = 600; <add> var levelHeight = 400; <add> <ide> // Setup pixi. <ide> var stage = new PIXI.Stage(0x141c22); <del> var renderer = PIXI.autoDetectRenderer(600, 400); <add> var renderer = PIXI.autoDetectRenderer(levelWidth, levelHeight); <ide> document.body.appendChild(renderer.view); <ide> <ide> // Setup world. <ide> world.register(renderSystem); <ide> world.register(new hitagi.systems.VelocitySystem()); <ide> <del> var PaddleSystem = function () { <add> var PlayerPaddleSystem = function () { <ide> var that = this; <ide> <add> var verticalBounce = function (entity) { <add> entity.c.velocity.yspeed *= -1.4; <add> }; <add> <ide> that.update = function (entity, dt) { <del> if (entity.has('paddle')) { <add> if (entity.has('player') && entity.has('paddle')) { <add> // Handle player input. <ide> if (controls.check('up')) { <ide> entity.c.velocity.yspeed -= entity.c.paddle.speed; <ide> } <ide> if (controls.check('down')) { <ide> entity.c.velocity.yspeed += entity.c.paddle.speed; <ide> } <add> <add> // Add friction to paddle. <ide> entity.c.velocity.yspeed *= entity.c.paddle.friction; <add> <add> // Stop paddle from leaving screen. <add> if (entity.c.position.y < 0) { <add> verticalBounce(entity); <add> } <add> if (entity.c.position.y + entity.c.paddle.height > levelHeight) { <add> verticalBounce(entity); <add> } <ide> } <ide> }; <ide> }; <del> world.register(new PaddleSystem()); <add> world.register(new PlayerPaddleSystem()); <ide> <ide> // Add entities. <ide> world.add( <ide> x2: 16, <ide> y2: 128 <ide> })) <add> .attach({'id': 'player'}) <ide> .attach({ <ide> 'id': 'paddle', <add> 'friction': 0.9, <add> 'height': 128, <ide> 'speed': 1, <del> 'friction': 0.9 <add> 'width': 16 <ide> }) <ide> ); <ide>
Java
apache-2.0
7efd144df7cf04dd7c6352fffe361f357070cc17
0
jaamsim/jaamsim
/*
 * JaamSim Discrete Event Simulation
 * Copyright (C) 2013 Ausenco Engineering Canada Inc.
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 */
package com.jaamsim.BasicObjects;

import com.jaamsim.Samples.SampleConstant;
import com.jaamsim.Samples.SampleExpInput;
import com.jaamsim.Thresholds.Threshold;
import com.jaamsim.input.InputAgent;
import com.jaamsim.input.Keyword;
import com.jaamsim.input.Output;
import com.jaamsim.units.DimensionlessUnit;
import com.jaamsim.units.TimeUnit;
import com.sandwell.JavaSimulation.Entity;
import com.sandwell.JavaSimulation.EntityInput;
import com.sandwell.JavaSimulation.EntityTarget;
import com.sandwell.JavaSimulation.InputErrorException;
import com.sandwell.JavaSimulation.IntegerInput;
import com.sandwell.JavaSimulation3D.DisplayEntity;

/**
 * EntityGenerator creates a sequence of DisplayEntities at random intervals, which are placed in a target Queue.
 */
public class EntityGenerator extends LinkedComponent {

    @Keyword(description = "The arrival time for the first generated entity.\n" +
            "A constant value, a distribution to be sampled, or a time series can be entered.",
             example = "EntityGenerator1 FirstArrivalTime { 1.0 h }")
    private final SampleExpInput firstArrivalTime;

    @Keyword(description = "The inter-arrival time between generated entities.\n" +
            "A constant value, a distribution to be sampled, or a time series can be entered.",
             example = "EntityGenerator1 InterArrivalTime { 1.5 h }")
    private final SampleExpInput interArrivalTime;

    @Keyword(description = "The prototype for entities to be generated.\n" +
            "The generated entities will be copies of this entity.",
             example = "EntityGenerator1 PrototypeEntity { Ship }")
    private final EntityInput<DisplayEntity> prototypeEntity;

    @Keyword(description = "The maximum number of entities to be generated.\n" +
            "Default is no limit.",
             example = "EntityGenerator1 MaxNumber { 3 }")
    private final IntegerInput maxNumber;

    private int numberGenerated = 0;  // Number of entities generated so far
    private boolean busy;

    {
        firstArrivalTime = new SampleExpInput( "FirstArrivalTime", "Key Inputs", new SampleConstant(TimeUnit.class, 0.0));
        firstArrivalTime.setUnitType( TimeUnit.class );
        firstArrivalTime.setEntity(this);
        this.addInput( firstArrivalTime);

        interArrivalTime = new SampleExpInput( "InterArrivalTime", "Key Inputs", new SampleConstant(TimeUnit.class, 1.0));
        interArrivalTime.setUnitType( TimeUnit.class );
        interArrivalTime.setEntity(this);
        this.addInput( interArrivalTime);

        prototypeEntity = new EntityInput<DisplayEntity>( DisplayEntity.class, "PrototypeEntity", "Key Inputs", null);
        this.addInput( prototypeEntity);

        maxNumber = new IntegerInput( "MaxNumber", "Key Inputs", null);
        maxNumber.setValidRange(1, Integer.MAX_VALUE);
        this.addInput( maxNumber);
    }

    public EntityGenerator() {
    }

    @Override
    public void validate() {
        super.validate();

        // Confirm that prototype entity has been specified
        if( prototypeEntity.getValue() == null ) {
            throw new InputErrorException( "The keyword PrototypeEntity must be set." );
        }

        interArrivalTime.verifyUnit();
    }

    @Override
    public void earlyInit() {
        super.earlyInit();
        numberGenerated = 0;
        busy = false;
    }

    @Override
    public void startUp() {
        super.startUp();

        // Generate the first entity and start the recursive loop to continue the process
        double dt = firstArrivalTime.getValue().getNextSample(0.0);
        this.scheduleProcess(dt, 5, new CreateNextEntityTarget(this, "createNextEntity"));
    }

    private static class CreateNextEntityTarget extends EntityTarget<EntityGenerator> {
        public CreateNextEntityTarget(EntityGenerator ent, String method) {
            super(ent, method);
        }

        @Override
        public void process() {
            ent.createNextEntity();
        }
    }

    @Override
    public void thresholdChanged() {

        // Is restart required?
        if (busy)
            return;

        // Are all the thresholds satisfied?
        for( Threshold thr : this.getThresholds() ) {
            if( thr.isClosed() )
                return;
        }

        // Has the last entity been generated?
        if( maxNumber.getValue() != null && numberGenerated >= maxNumber.getValue() )
            return;

        // Restart entity creation
        double dt = interArrivalTime.getValue().getNextSample(getSimTime());
        this.scheduleProcess(dt, 5, new CreateNextEntityTarget(this, "createNextEntity"));
    }

    /**
     * Loop recursively to generate each entity
     */
    public void createNextEntity() {

        // Do any of the thresholds stop the generator?
        for( Threshold thr : this.getThresholds() ) {
            if( thr.isClosed() ) {
                busy = false;
                return;
            }
        }
        busy = true;

        // Create the new entity
        numberGenerated++;
        DisplayEntity proto = prototypeEntity.getValue();
        StringBuilder sb = new StringBuilder();
        sb.append(proto.getInputName()).append("_Copy").append(numberGenerated);
        DisplayEntity ent = InputAgent.defineEntityWithUniqueName(proto.getClass(), sb.toString(), "_", true);
        ent.copyInputs(proto);
        ent.setFlag(Entity.FLAG_GENERATED);

        // Send the entity to the next element in the chain
        this.sendToNextComponent( ent );

        // Stop if the last entity has been generated
        if( maxNumber.getValue() != null && numberGenerated >= maxNumber.getValue() )
            return;

        // Schedule the next entity to be generated
        double dt = interArrivalTime.getValue().getNextSample(getSimTime());
        this.scheduleProcess(dt, 5, new CreateNextEntityTarget(this, "createNextEntity"));
    }

    @Output(name = "NumberGenerated",
            description = "The number of entities generated by this generator.",
            unitType = DimensionlessUnit.class)
    public Double getNumberGenerated(double simTime) {
        return (double) numberGenerated;
    }
}
src/main/java/com/jaamsim/BasicObjects/EntityGenerator.java
/*
 * JaamSim Discrete Event Simulation
 * Copyright (C) 2013 Ausenco Engineering Canada Inc.
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 */
package com.jaamsim.BasicObjects;

import com.jaamsim.Samples.SampleConstant;
import com.jaamsim.Samples.SampleExpInput;
import com.jaamsim.Thresholds.Threshold;
import com.jaamsim.input.InputAgent;
import com.jaamsim.input.Keyword;
import com.jaamsim.input.Output;
import com.jaamsim.units.DimensionlessUnit;
import com.jaamsim.units.TimeUnit;
import com.sandwell.JavaSimulation.Entity;
import com.sandwell.JavaSimulation.EntityInput;
import com.sandwell.JavaSimulation.EntityTarget;
import com.sandwell.JavaSimulation.InputErrorException;
import com.sandwell.JavaSimulation.IntegerInput;
import com.sandwell.JavaSimulation3D.DisplayEntity;

/**
 * EntityGenerator creates a sequence of DisplayEntities at random intervals, which are placed in a target Queue.
 */
public class EntityGenerator extends LinkedComponent {

    @Keyword(description = "The arrival time for the first generated entity.\n" +
            "A constant value, a distribution to be sampled, or a time series can be entered.",
             example = "EntityGenerator1 FirstArrivalTime { 1.0 h }")
    private final SampleExpInput firstArrivalTime;

    @Keyword(description = "The inter-arrival time between generated entities.\n" +
            "A constant value, a distribution to be sampled, or a time series can be entered.",
             example = "EntityGenerator1 InterArrivalTime { 1.5 h }")
    private final SampleExpInput interArrivalTime;

    @Keyword(description = "The prototype for entities to be generated.\n" +
            "The generated entities will be copies of this entity.",
             example = "EntityGenerator1 PrototypeEntity { Ship }")
    private final EntityInput<DisplayEntity> prototypeEntity;

    @Keyword(description = "The maximum number of entities to be generated.\n" +
            "Default is no limit.",
             example = "EntityGenerator1 MaxNumber { 3 }")
    private final IntegerInput maxNumber;

    private int numberGenerated = 0;  // Number of entities generated so far
    private boolean busy;

    {
        firstArrivalTime = new SampleExpInput( "FirstArrivalTime", "Key Inputs", new SampleConstant(TimeUnit.class, 0.0));
        firstArrivalTime.setUnitType( TimeUnit.class );
        firstArrivalTime.setEntity(this);
        this.addInput( firstArrivalTime);

        interArrivalTime = new SampleExpInput( "InterArrivalTime", "Key Inputs", new SampleConstant(TimeUnit.class, 1.0));
        interArrivalTime.setUnitType( TimeUnit.class );
        interArrivalTime.setEntity(this);
        this.addInput( interArrivalTime);

        prototypeEntity = new EntityInput<DisplayEntity>( DisplayEntity.class, "PrototypeEntity", "Key Inputs", null);
        this.addInput( prototypeEntity);

        maxNumber = new IntegerInput( "MaxNumber", "Key Inputs", null);
        maxNumber.setValidRange(1, Integer.MAX_VALUE);
        this.addInput( maxNumber);
    }

    public EntityGenerator() {
    }

    @Override
    public void validate() {
        super.validate();

        // Confirm that probability distribution has been specified
        if( interArrivalTime.getValue() == null ) {
            throw new InputErrorException( "The keyword InterArrivalTime must be set." );
        }

        // Confirm that prototype entity has been specified
        if( prototypeEntity.getValue() == null ) {
            throw new InputErrorException( "The keyword PrototypeEntity must be set." );
        }

        interArrivalTime.verifyUnit();
    }

    @Override
    public void earlyInit() {
        super.earlyInit();
        numberGenerated = 0;
        busy = false;
    }

    @Override
    public void startUp() {
        super.startUp();

        // Generate the first entity and start the recursive loop to continue the process
        double dt = firstArrivalTime.getValue().getNextSample(0.0);
        this.scheduleProcess(dt, 5, new CreateNextEntityTarget(this, "createNextEntity"));
    }

    private static class CreateNextEntityTarget extends EntityTarget<EntityGenerator> {
        public CreateNextEntityTarget(EntityGenerator ent, String method) {
            super(ent, method);
        }

        @Override
        public void process() {
            ent.createNextEntity();
        }
    }

    @Override
    public void thresholdChanged() {

        // Is restart required?
        if (busy)
            return;

        // Are all the thresholds satisfied?
        for( Threshold thr : this.getThresholds() ) {
            if( thr.isClosed() )
                return;
        }

        // Has the last entity been generated?
        if( maxNumber.getValue() != null && numberGenerated >= maxNumber.getValue() )
            return;

        // Restart entity creation
        double dt = interArrivalTime.getValue().getNextSample(getSimTime());
        this.scheduleProcess(dt, 5, new CreateNextEntityTarget(this, "createNextEntity"));
    }

    /**
     * Loop recursively to generate each entity
     */
    public void createNextEntity() {

        // Do any of the thresholds stop the generator?
        for( Threshold thr : this.getThresholds() ) {
            if( thr.isClosed() ) {
                busy = false;
                return;
            }
        }
        busy = true;

        // Create the new entity
        numberGenerated++;
        DisplayEntity proto = prototypeEntity.getValue();
        StringBuilder sb = new StringBuilder();
        sb.append(proto.getInputName()).append("_Copy").append(numberGenerated);
        DisplayEntity ent = InputAgent.defineEntityWithUniqueName(proto.getClass(), sb.toString(), "_", true);
        ent.copyInputs(proto);
        ent.setFlag(Entity.FLAG_GENERATED);

        // Send the entity to the next element in the chain
        this.sendToNextComponent( ent );

        // Stop if the last entity has been generated
        if( maxNumber.getValue() != null && numberGenerated >= maxNumber.getValue() )
            return;

        // Schedule the next entity to be generated
        double dt = interArrivalTime.getValue().getNextSample(getSimTime());
        this.scheduleProcess(dt, 5, new CreateNextEntityTarget(this, "createNextEntity"));
    }

    @Output(name = "NumberGenerated",
            description = "The number of entities generated by this generator.",
            unitType = DimensionlessUnit.class)
    public Double getNumberGenerated(double simTime) {
        return (double) numberGenerated;
    }
}
JS: Remove unnecessary validation for EntityGenerator

Signed-off-by: Harry King <[email protected]>
Signed-off-by: Harvey Harrison <[email protected]>
src/main/java/com/jaamsim/BasicObjects/EntityGenerator.java
JS: Remove unnecessary validation for EntityGenerator
<ide><path>src/main/java/com/jaamsim/BasicObjects/EntityGenerator.java
<ide> 	@Override
<ide> 	public void validate() {
<ide> 		super.validate();
<del>
<del> 		// Confirm that probability distribution has been specified
<del> 		if( interArrivalTime.getValue() == null ) {
<del> 			throw new InputErrorException( "The keyword InterArrivalTime must be set." );
<del> 		}
<ide>
<ide> 		// Confirm that prototype entity has been specified
<ide> 		if( prototypeEntity.getValue() == null ) {